Loading the data

carbonData<-read.csv('/Users/angadsingh/Downloads/Carbon Emission.csv')
summary(carbonData)
  Body.Type             Sex                Diet           How.Often.Shower   Heating.Energy.Source  Transport         Vehicle.Type       Social.Activity   
 Length:10000       Length:10000       Length:10000       Length:10000       Length:10000          Length:10000       Length:10000       Length:10000      
 Class :character   Class :character   Class :character   Class :character   Class :character      Class :character   Class :character   Class :character  
 Mode  :character   Mode  :character   Mode  :character   Mode  :character   Mode  :character      Mode  :character   Mode  :character   Mode  :character  
                                                                                                                                                           
                                                                                                                                                           
                                                                                                                                                           
 Monthly.Grocery.Bill Frequency.of.Traveling.by.Air Vehicle.Monthly.Distance.Km Waste.Bag.Size     Waste.Bag.Weekly.Count How.Long.TV.PC.Daily.Hour
 Min.   : 50.0        Length:10000                  Min.   :   0                Length:10000       Min.   :1.000          Min.   : 0.00            
 1st Qu.:111.0        Class :character              1st Qu.:  69                Class :character   1st Qu.:2.000          1st Qu.: 6.00            
 Median :173.0        Mode  :character              Median : 823                Mode  :character   Median :4.000          Median :12.00            
 Mean   :173.9                                      Mean   :2031                                   Mean   :4.025          Mean   :12.14            
 3rd Qu.:237.0                                      3rd Qu.:2517                                   3rd Qu.:6.000          3rd Qu.:18.00            
 Max.   :299.0                                      Max.   :9999                                   Max.   :7.000          Max.   :24.00            
 How.Many.New.Clothes.Monthly How.Long.Internet.Daily.Hour Energy.efficiency   Recycling         Cooking_With       CarbonEmission
 Min.   : 0.00                Min.   : 0.00                Length:10000       Length:10000       Length:10000       Min.   : 306  
 1st Qu.:13.00                1st Qu.: 6.00                Class :character   Class :character   Class :character   1st Qu.:1538  
 Median :25.00                Median :12.00                Mode  :character   Mode  :character   Mode  :character   Median :2080  
 Mean   :25.11                Mean   :11.89                                                                         Mean   :2269  
 3rd Qu.:38.00                3rd Qu.:18.00                                                                         3rd Qu.:2768  
 Max.   :50.00                Max.   :24.00                                                                         Max.   :8377  
str(carbonData)
'data.frame':   10000 obs. of  20 variables:
 $ Body.Type                    : chr  "overweight" "obese" "overweight" "overweight" ...
 $ Sex                          : chr  "female" "female" "male" "male" ...
 $ Diet                         : chr  "pescatarian" "vegetarian" "omnivore" "omnivore" ...
 $ How.Often.Shower             : chr  "daily" "less frequently" "more frequently" "twice a day" ...
 $ Heating.Energy.Source        : chr  "coal" "natural gas" "wood" "wood" ...
 $ Transport                    : chr  "public" "walk/bicycle" "private" "walk/bicycle" ...
 $ Vehicle.Type                 : chr  "" "" "petrol" "" ...
 $ Social.Activity              : chr  "often" "often" "never" "sometimes" ...
 $ Monthly.Grocery.Bill         : int  230 114 138 157 266 144 56 59 200 135 ...
 $ Frequency.of.Traveling.by.Air: chr  "frequently" "rarely" "never" "rarely" ...
 $ Vehicle.Monthly.Distance.Km  : int  210 9 2472 74 8457 658 5363 54 1376 440 ...
 $ Waste.Bag.Size               : chr  "large" "extra large" "small" "medium" ...
 $ Waste.Bag.Weekly.Count       : int  4 3 1 3 1 1 4 3 3 1 ...
 $ How.Long.TV.PC.Daily.Hour    : int  7 9 14 20 3 22 9 5 3 8 ...
 $ How.Many.New.Clothes.Monthly : int  26 38 47 5 5 18 11 39 31 23 ...
 $ How.Long.Internet.Daily.Hour : int  1 5 6 7 6 9 19 15 15 18 ...
 $ Energy.efficiency            : chr  "No" "No" "Sometimes" "Sometimes" ...
 $ Recycling                    : chr  "['Metal']" "['Metal']" "['Metal']" "['Paper', 'Plastic', 'Glass', 'Metal']" ...
 $ Cooking_With                 : chr  "['Stove', 'Oven']" "['Stove', 'Microwave']" "['Oven', 'Microwave']" "['Microwave', 'Grill', 'Airfryer']" ...
 $ CarbonEmission               : int  2238 1892 2595 1074 4743 1647 1832 2322 2494 1178 ...

From the str of carbon data i can see that i am having empty vehicle types as “” so i will replace them with No vehicle


carbonData$Vehicle.Type[carbonData$Transport=='public'|carbonData$Transport=='walk/bicycle']<-'FuelEfficient'
#carbonData<- carbonData %>% mutate(Vehicle.Type=ifelse(Vehicle.Type=="","No vehicle",Vehicle.Type))
str(carbonData)
'data.frame':   10000 obs. of  20 variables:
 $ Body.Type                    : chr  "overweight" "obese" "overweight" "overweight" ...
 $ Sex                          : chr  "female" "female" "male" "male" ...
 $ Diet                         : chr  "pescatarian" "vegetarian" "omnivore" "omnivore" ...
 $ How.Often.Shower             : chr  "daily" "less frequently" "more frequently" "twice a day" ...
 $ Heating.Energy.Source        : chr  "coal" "natural gas" "wood" "wood" ...
 $ Transport                    : chr  "public" "walk/bicycle" "private" "walk/bicycle" ...
 $ Vehicle.Type                 : chr  "FuelEfficient" "FuelEfficient" "petrol" "FuelEfficient" ...
 $ Social.Activity              : chr  "often" "often" "never" "sometimes" ...
 $ Monthly.Grocery.Bill         : int  230 114 138 157 266 144 56 59 200 135 ...
 $ Frequency.of.Traveling.by.Air: chr  "frequently" "rarely" "never" "rarely" ...
 $ Vehicle.Monthly.Distance.Km  : int  210 9 2472 74 8457 658 5363 54 1376 440 ...
 $ Waste.Bag.Size               : chr  "large" "extra large" "small" "medium" ...
 $ Waste.Bag.Weekly.Count       : int  4 3 1 3 1 1 4 3 3 1 ...
 $ How.Long.TV.PC.Daily.Hour    : int  7 9 14 20 3 22 9 5 3 8 ...
 $ How.Many.New.Clothes.Monthly : int  26 38 47 5 5 18 11 39 31 23 ...
 $ How.Long.Internet.Daily.Hour : int  1 5 6 7 6 9 19 15 15 18 ...
 $ Energy.efficiency            : chr  "No" "No" "Sometimes" "Sometimes" ...
 $ Recycling                    : chr  "['Metal']" "['Metal']" "['Metal']" "['Paper', 'Plastic', 'Glass', 'Metal']" ...
 $ Cooking_With                 : chr  "['Stove', 'Oven']" "['Stove', 'Microwave']" "['Oven', 'Microwave']" "['Microwave', 'Grill', 'Airfryer']" ...
 $ CarbonEmission               : int  2238 1892 2595 1074 4743 1647 1832 2322 2494 1178 ...
#carbonData[carbonData == ""]<-NA
colSums(is.na(carbonData))
                    Body.Type                           Sex                          Diet              How.Often.Shower         Heating.Energy.Source 
                            0                             0                             0                             0                             0 
                    Transport                  Vehicle.Type               Social.Activity          Monthly.Grocery.Bill Frequency.of.Traveling.by.Air 
                            0                             0                             0                             0                             0 
  Vehicle.Monthly.Distance.Km                Waste.Bag.Size        Waste.Bag.Weekly.Count     How.Long.TV.PC.Daily.Hour  How.Many.New.Clothes.Monthly 
                            0                             0                             0                             0                             0 
 How.Long.Internet.Daily.Hour             Energy.efficiency                     Recycling                  Cooking_With                CarbonEmission 
                            0                             0                             0                             0                             0 
library(dplyr)
carbonData<-carbonData %>%
  mutate_if(is.character, as.factor)%>%
  mutate_if(is.integer, as.numeric)

str(carbonData)
'data.frame':   10000 obs. of  20 variables:
 $ Body.Type                    : Factor w/ 4 levels "normal","obese",..: 3 2 3 3 2 3 4 4 3 4 ...
 $ Sex                          : Factor w/ 2 levels "female","male": 1 1 2 2 1 2 1 1 2 1 ...
 $ Diet                         : Factor w/ 4 levels "omnivore","pescatarian",..: 2 4 1 1 4 4 3 3 1 2 ...
 $ How.Often.Shower             : Factor w/ 4 levels "daily","less frequently",..: 1 2 3 4 1 2 2 3 1 1 ...
 $ Heating.Energy.Source        : Factor w/ 4 levels "coal","electricity",..: 1 3 4 4 1 4 4 1 4 4 ...
 $ Transport                    : Factor w/ 3 levels "private","public",..: 2 3 1 3 1 2 1 3 2 2 ...
 $ Vehicle.Type                 : Factor w/ 6 levels "diesel","electric",..: 3 3 6 3 1 3 4 3 3 3 ...
 $ Social.Activity              : Factor w/ 3 levels "never","often",..: 2 2 1 3 2 3 1 3 1 2 ...
 $ Monthly.Grocery.Bill         : num  230 114 138 157 266 144 56 59 200 135 ...
 $ Frequency.of.Traveling.by.Air: Factor w/ 4 levels "frequently","never",..: 1 3 2 3 4 1 3 4 1 3 ...
 $ Vehicle.Monthly.Distance.Km  : num  210 9 2472 74 8457 ...
 $ Waste.Bag.Size               : Factor w/ 4 levels "extra large",..: 2 1 4 3 2 2 3 1 3 1 ...
 $ Waste.Bag.Weekly.Count       : num  4 3 1 3 1 1 4 3 3 1 ...
 $ How.Long.TV.PC.Daily.Hour    : num  7 9 14 20 3 22 9 5 3 8 ...
 $ How.Many.New.Clothes.Monthly : num  26 38 47 5 5 18 11 39 31 23 ...
 $ How.Long.Internet.Daily.Hour : num  1 5 6 7 6 9 19 15 15 18 ...
 $ Energy.efficiency            : Factor w/ 3 levels "No","Sometimes",..: 1 1 2 2 3 2 2 1 3 2 ...
 $ Recycling                    : Factor w/ 16 levels "['Glass', 'Metal']",..: 3 3 3 7 11 4 16 8 2 2 ...
 $ Cooking_With                 : Factor w/ 16 levels "['Grill', 'Airfryer']",..: 14 10 6 2 7 13 1 10 2 2 ...
 $ CarbonEmission               : num  2238 1892 2595 1074 4743 ...
summary(carbonData)
       Body.Type        Sex                Diet             How.Often.Shower Heating.Energy.Source        Transport           Vehicle.Type   Social.Activity
 normal     :2473   female:5007   omnivore   :2492   daily          :2546    coal       :2523      private     :3279   diesel       : 622   never    :3406  
 obese      :2500   male  :4993   pescatarian:2554   less frequently:2487    electricity:2552      public      :3294   electric     : 671   often    :3319  
 overweight :2487                 vegan      :2497   more frequently:2451    natural gas:2462      walk/bicycle:3427   FuelEfficient:6721   sometimes:3275  
 underweight:2540                 vegetarian :2457   twice a day    :2516    wood       :2463                          hybrid       : 642                   
                                                                                                                       lpg          : 697                   
                                                                                                                       petrol       : 647                   
                                                                                                                                                            
 Monthly.Grocery.Bill Frequency.of.Traveling.by.Air Vehicle.Monthly.Distance.Km     Waste.Bag.Size Waste.Bag.Weekly.Count How.Long.TV.PC.Daily.Hour
 Min.   : 50.0        frequently     :2524          Min.   :   0                extra large:2500   Min.   :1.000          Min.   : 0.00            
 1st Qu.:111.0        never          :2459          1st Qu.:  69                large      :2501   1st Qu.:2.000          1st Qu.: 6.00            
 Median :173.0        rarely         :2477          Median : 823                medium     :2474   Median :4.000          Median :12.00            
 Mean   :173.9        very frequently:2540          Mean   :2031                small      :2525   Mean   :4.025          Mean   :12.14            
 3rd Qu.:237.0                                      3rd Qu.:2517                                   3rd Qu.:6.000          3rd Qu.:18.00            
 Max.   :299.0                                      Max.   :9999                                   Max.   :7.000          Max.   :24.00            
                                                                                                                                                   
 How.Many.New.Clothes.Monthly How.Long.Internet.Daily.Hour Energy.efficiency                                  Recycling   
 Min.   : 0.00                Min.   : 0.00                No       :3221    []                                    : 675  
 1st Qu.:13.00                1st Qu.: 6.00                Sometimes:3463    ['Paper', 'Plastic', 'Metal']         : 648  
 Median :25.00                Median :12.00                Yes      :3316    ['Paper', 'Glass', 'Metal']           : 647  
 Mean   :25.11                Mean   :11.89                                  ['Glass', 'Metal']                    : 645  
 3rd Qu.:38.00                3rd Qu.:18.00                                  ['Paper', 'Plastic', 'Glass', 'Metal']: 637  
 Max.   :50.00                Max.   :24.00                                  ['Paper', 'Plastic']                  : 633  
                                                                             (Other)                               :6115  
                                              Cooking_With  CarbonEmission
 ['Stove', 'Oven']                                  : 670   Min.   : 306  
 ['Stove', 'Microwave', 'Grill', 'Airfryer']        : 652   1st Qu.:1538  
 ['Oven', 'Microwave']                              : 649   Median :2080  
 ['Oven', 'Microwave', 'Grill', 'Airfryer']         : 638   Mean   :2269  
 ['Stove', 'Oven', 'Microwave', 'Grill', 'Airfryer']: 637   3rd Qu.:2768  
 ['Stove', 'Grill', 'Airfryer']                     : 628   Max.   :8377  
 (Other)                                            :6126                 
table(carbonData$Body.Type)

     normal       obese  overweight underweight 
       2473        2500        2487        2540 
table(carbonData$Sex)

female   male 
  5007   4993 
table(carbonData$Diet)

   omnivore pescatarian       vegan  vegetarian 
       2492        2554        2497        2457 
table(carbonData$How.Often.Shower)

          daily less frequently more frequently     twice a day 
           2546            2487            2451            2516 
table(carbonData$Heating.Energy.Source)

       coal electricity natural gas        wood 
       2523        2552        2462        2463 
table(carbonData$Transport)

     private       public walk/bicycle 
        3279         3294         3427 
table(carbonData$Social.Activity)

    never     often sometimes 
     3406      3319      3275 
table(carbonData$Frequency.of.Traveling.by.Air)

     frequently           never          rarely very frequently 
           2524            2459            2477            2540 
table(carbonData$Waste.Bag.Size)

extra large       large      medium       small 
       2500        2501        2474        2525 
table(carbonData$Energy.efficiency)

       No Sometimes       Yes 
     3221      3463      3316 
hist(carbonData$CarbonEmission)

carbonData$CarbonEmission<-log(carbonData$CarbonEmission) 
hist(carbonData$CarbonEmission)

carbonIndices<-which(names(carbonData)=='CarbonEmission')
for (c in colnames(carbonData[,-carbonIndices])) {
  if(is.factor(carbonData[,c])){
    try({
        anovaaResult<-aov(carbonData$CarbonEmission~carbonData[,c])
        cat("ANOVA of ",c, "and CarbonEmission", "\n")
        print(summary(anovaaResult))
        boxplot(carbonData$CarbonEmission~carbonData[,c],shade=TRUE, main = paste("Carbon Emission vs", c), xlab ="CarbonEmission", ylab=c ,col="lightgreen")
        
      })
  }
  else if (is.numeric(carbonData[,c])){
    try({
      corTest<-cor.test(carbonData$CarbonEmission,carbonData[,c], method = "pearson")
      cat("p.value of ",c, "and Carbon Emission", corTest$p.value, "\n")
      plot(carbonData$CarbonEmission,carbonData[,c], main = paste("Carbon Emission vs", c), xlab ="Carbon Emission", ylab=c)
    })
  }
  
}
ANOVA of  Body.Type and CarbonEmission 
                  Df Sum Sq Mean Sq F value Pr(>F)    
carbonData[, c]    3   82.7  27.583     149 <2e-16 ***
Residuals       9996 1850.3   0.185                   
---
Signif. codes:  0 ‘***’ 0.001 ‘**’ 0.01 ‘*’ 0.05 ‘.’ 0.1 ‘ ’ 1
ANOVA of  Sex and CarbonEmission 
                  Df Sum Sq Mean Sq F value Pr(>F)    
carbonData[, c]    1   55.5   55.51   295.6 <2e-16 ***
Residuals       9998 1877.5    0.19                   
---
Signif. codes:  0 ‘***’ 0.001 ‘**’ 0.01 ‘*’ 0.05 ‘.’ 0.1 ‘ ’ 1

ANOVA of  Diet and CarbonEmission 
                  Df Sum Sq Mean Sq F value   Pr(>F)    
carbonData[, c]    3   11.8   3.944   20.52 2.98e-13 ***
Residuals       9996 1921.2   0.192                     
---
Signif. codes:  0 ‘***’ 0.001 ‘**’ 0.01 ‘*’ 0.05 ‘.’ 0.1 ‘ ’ 1

ANOVA of  How.Often.Shower and CarbonEmission 
                  Df Sum Sq Mean Sq F value Pr(>F)  
carbonData[, c]    3    1.9  0.6333   3.278 0.0201 *
Residuals       9996 1931.1  0.1932                 
---
Signif. codes:  0 ‘***’ 0.001 ‘**’ 0.01 ‘*’ 0.05 ‘.’ 0.1 ‘ ’ 1

ANOVA of  Heating.Energy.Source and CarbonEmission 
                  Df Sum Sq Mean Sq F value Pr(>F)    
carbonData[, c]    3   71.1  23.702   127.2 <2e-16 ***
Residuals       9996 1861.9   0.186                   
---
Signif. codes:  0 ‘***’ 0.001 ‘**’ 0.01 ‘*’ 0.05 ‘.’ 0.1 ‘ ’ 1

ANOVA of  Transport and CarbonEmission 
                  Df Sum Sq Mean Sq F value Pr(>F)    
carbonData[, c]    2  384.7  192.34    1242 <2e-16 ***
Residuals       9997 1548.3    0.15                   
---
Signif. codes:  0 ‘***’ 0.001 ‘**’ 0.01 ‘*’ 0.05 ‘.’ 0.1 ‘ ’ 1

ANOVA of  Vehicle.Type and CarbonEmission 
                  Df Sum Sq Mean Sq F value Pr(>F)    
carbonData[, c]    5  573.6  114.71   843.3 <2e-16 ***
Residuals       9994 1359.5    0.14                   
---
Signif. codes:  0 ‘***’ 0.001 ‘**’ 0.01 ‘*’ 0.05 ‘.’ 0.1 ‘ ’ 1

ANOVA of  Social.Activity and CarbonEmission 
                  Df Sum Sq Mean Sq F value   Pr(>F)    
carbonData[, c]    2    8.5   4.248   22.07 2.74e-10 ***
Residuals       9997 1924.5   0.193                     
---
Signif. codes:  0 ‘***’ 0.001 ‘**’ 0.01 ‘*’ 0.05 ‘.’ 0.1 ‘ ’ 1

p.value of  Monthly.Grocery.Bill and Carbon Emission 8.380793e-21 

ANOVA of  Frequency.of.Traveling.by.Air and CarbonEmission 
                  Df Sum Sq Mean Sq F value Pr(>F)    
carbonData[, c]    3  568.2  189.41    1387 <2e-16 ***
Residuals       9996 1364.8    0.14                   
---
Signif. codes:  0 ‘***’ 0.001 ‘**’ 0.01 ‘*’ 0.05 ‘.’ 0.1 ‘ ’ 1

p.value of  Vehicle.Monthly.Distance.Km and Carbon Emission 0 

ANOVA of  Waste.Bag.Size and CarbonEmission 
                  Df Sum Sq Mean Sq F value Pr(>F)    
carbonData[, c]    3   53.1  17.692   94.07 <2e-16 ***
Residuals       9996 1879.9   0.188                   
---
Signif. codes:  0 ‘***’ 0.001 ‘**’ 0.01 ‘*’ 0.05 ‘.’ 0.1 ‘ ’ 1

p.value of  Waste.Bag.Weekly.Count and Carbon Emission 3.257926e-79 

p.value of  How.Long.TV.PC.Daily.Hour and Carbon Emission 0.2282313 

p.value of  How.Many.New.Clothes.Monthly and Carbon Emission 8.851718e-131 

p.value of  How.Long.Internet.Daily.Hour and Carbon Emission 1.411988e-09 

ANOVA of  Energy.efficiency and CarbonEmission 
                  Df Sum Sq Mean Sq F value Pr(>F)
carbonData[, c]    2    0.7  0.3284   1.699  0.183
Residuals       9997 1932.4  0.1933               

ANOVA of  Recycling and CarbonEmission 
                  Df Sum Sq Mean Sq F value Pr(>F)    
carbonData[, c]   15   41.7  2.7802   14.68 <2e-16 ***
Residuals       9984 1891.3  0.1894                   
---
Signif. codes:  0 ‘***’ 0.001 ‘**’ 0.01 ‘*’ 0.05 ‘.’ 0.1 ‘ ’ 1

ANOVA of  Cooking_With and CarbonEmission 
                  Df Sum Sq Mean Sq F value Pr(>F)  
carbonData[, c]   15      5  0.3301   1.709 0.0422 *
Residuals       9984   1928  0.1931                 
---
Signif. codes:  0 ‘***’ 0.001 ‘**’ 0.01 ‘*’ 0.05 ‘.’ 0.1 ‘ ’ 1

library(tidyverse)
parseList<-function(x,emptyLable){
  parsedItem<-str_remove_all(x,"\\[|\\]|'")%>%
    strsplit(", ")%>%
    unlist()
  
  if(length(parsedItem)==0||all(parsedItem=="")){
    parsedItem<-emptyLable
  }
  return(parsedItem)
}
carbonData$Recycling<-sapply(carbonData$Recycling,parseList,emptyLable="No_Recycling")
carbonData$Cooking_With<-sapply(carbonData$Cooking_With,parseList,emptyLable="No_Cooking")

carbonData$Recycling<-sapply(carbonData$Recycling,paste,collapse=",")
carbonData$Cooking_With<-sapply(carbonData$Cooking_With,paste,collapse=",")

#str(carbonData)

dummies<-function(col){

  items<-unlist(str_split(col,","))
  items<-trimws(items)
  items<-items[items != ""]
  
  uniqueItems<-unique(c(items))
  dummyDataFrame<-data.frame(matrix(0,nrow = length(col),ncol = length(uniqueItems)))
  colnames(dummyDataFrame)<-uniqueItems
  
  for (i in seq_along(col)) {
    rowItems<-unlist(str_split(col[i],","))%>%
    map_chr(~str_trim(.))%>%
    discard(~.=="")
    
    rowItems<-rowItems[rowItems %in% uniqueItems]
    dummyDataFrame[i,rowItems]<-1
  }
  return(dummyDataFrame)
}

recyclingDummies<-dummies(carbonData$Recycling)
cookingDummies<-dummies(carbonData$Cooking_With)


carbonData<-cbind(carbonData,recyclingDummies,cookingDummies)

carbonData$Recycling<- NULL
carbonData$Cooking_With<-NULL

str(carbonData)
'data.frame':   10000 obs. of  29 variables:
 $ Body.Type                    : Factor w/ 4 levels "normal","obese",..: 3 2 3 3 2 3 4 4 3 4 ...
 $ Sex                          : Factor w/ 2 levels "female","male": 1 1 2 2 1 2 1 1 2 1 ...
 $ Diet                         : Factor w/ 4 levels "omnivore","pescatarian",..: 2 4 1 1 4 4 3 3 1 2 ...
 $ How.Often.Shower             : Factor w/ 4 levels "daily","less frequently",..: 1 2 3 4 1 2 2 3 1 1 ...
 $ Heating.Energy.Source        : Factor w/ 4 levels "coal","electricity",..: 1 3 4 4 1 4 4 1 4 4 ...
 $ Transport                    : Factor w/ 3 levels "private","public",..: 2 3 1 3 1 2 1 3 2 2 ...
 $ Vehicle.Type                 : Factor w/ 6 levels "diesel","electric",..: 3 3 6 3 1 3 4 3 3 3 ...
 $ Social.Activity              : Factor w/ 3 levels "never","often",..: 2 2 1 3 2 3 1 3 1 2 ...
 $ Monthly.Grocery.Bill         : num  230 114 138 157 266 144 56 59 200 135 ...
 $ Frequency.of.Traveling.by.Air: Factor w/ 4 levels "frequently","never",..: 1 3 2 3 4 1 3 4 1 3 ...
 $ Vehicle.Monthly.Distance.Km  : num  210 9 2472 74 8457 ...
 $ Waste.Bag.Size               : Factor w/ 4 levels "extra large",..: 2 1 4 3 2 2 3 1 3 1 ...
 $ Waste.Bag.Weekly.Count       : num  4 3 1 3 1 1 4 3 3 1 ...
 $ How.Long.TV.PC.Daily.Hour    : num  7 9 14 20 3 22 9 5 3 8 ...
 $ How.Many.New.Clothes.Monthly : num  26 38 47 5 5 18 11 39 31 23 ...
 $ How.Long.Internet.Daily.Hour : num  1 5 6 7 6 9 19 15 15 18 ...
 $ Energy.efficiency            : Factor w/ 3 levels "No","Sometimes",..: 1 1 2 2 3 2 2 1 3 2 ...
 $ CarbonEmission               : num  7.71 7.55 7.86 6.98 8.46 ...
 $ Metal                        : num  1 1 1 1 0 1 0 0 0 0 ...
 $ Paper                        : num  0 0 0 1 1 1 0 1 0 0 ...
 $ Plastic                      : num  0 0 0 1 0 0 0 1 0 0 ...
 $ Glass                        : num  0 0 0 1 0 1 0 1 1 1 ...
 $ No_Recycling                 : num  0 0 0 0 0 0 1 0 0 0 ...
 $ Stove                        : num  1 1 0 0 0 1 0 1 0 0 ...
 $ Oven                         : num  1 0 1 0 1 1 0 0 0 0 ...
 $ Microwave                    : num  0 1 1 1 0 1 0 1 1 1 ...
 $ Grill                        : num  0 0 0 1 0 0 1 0 1 1 ...
 $ Airfryer                     : num  0 0 0 1 0 0 1 0 1 1 ...
 $ No_Cooking                   : num  0 0 0 0 0 0 0 0 0 0 ...
library(caret)

carbonDataIndexs <- createDataPartition(carbonData$CarbonEmission, p=0.8, list=FALSE)

carbonTrainData<-carbonData[carbonDataIndexs,]
carbonTrainData

carbonTestData<-carbonData[-carbonDataIndexs,]
carbonTestData

carbonTestLabels<-carbonTestData$CarbonEmission

#Benchmark

meanTransport<- carbonTrainData %>% #this will calculate emission per transport type 
  group_by(Transport) %>%
  summarize(meanEmission= mean(CarbonEmission, na.rm = TRUE))

meanTransportTable<-setNames(meanTransport$meanEmission,meanTransport$Transport)  # will store transport levels to its average emission

predictTransport<-function(row){
  transportType<-as.character(row["Transport"])
  if(transportType %in% names(meanTransportTable)){
    return(meanTransportTable[[transportType]])
  }
}

benchmarkPred<-apply(carbonTestData,1,predictTransport) # will apply predictTransport function on the test data


rmse(benchmarkPred,carbonTestData$CarbonEmission)
[1] 0.001769568
MAE(benchmarkPred,carbonTestData$CarbonEmission)
[1] 0.3201579
rsquaredNew<-sum((benchmarkPred-carbonTestData$CarbonEmission)^2)/sum((carbonTestData$CarbonEmission-mean(carbonTestData$CarbonEmission))^2)
rsquaredNew
[1] 0.8108719

#KNN Model

knnModel<-train(CarbonEmission~.,data = carbonTrainData, method="knn", trControl=trainControl(method = "cv", number=5))
knnModel
k-Nearest Neighbors 

8001 samples
  28 predictor

No pre-processing
Resampling: Cross-Validated (5 fold) 
Summary of sample sizes: 6402, 6400, 6401, 6400, 6401 
Resampling results across tuning parameters:

  k  RMSE       Rsquared   MAE      
  5  0.3935491  0.2364802  0.3108153
  7  0.3847708  0.2561085  0.3047360
  9  0.3814251  0.2635529  0.3024925

RMSE was used to select the optimal model using the smallest value.
The final value used for the model was k = 9.
knnPred<-predict(knnModel,newdata = carbonTestData)

rmse=function(x,y){
  return((mean(x-y)^2)^0.5)
}
rmse(knnPred,carbonTestLabels)
[1] 0.008526172
lmModel<-train(CarbonEmission~.,data = carbonTrainData, method="lm", trControl=trainControl(method = "cv", number=5))
lmModel
Linear Regression 

8001 samples
  28 predictor

No pre-processing
Resampling: Cross-Validated (5 fold) 
Summary of sample sizes: 6402, 6402, 6400, 6400, 6400 
Resampling results:

  RMSE       Rsquared   MAE       
  0.1210893  0.9245749  0.08667648

Tuning parameter 'intercept' was held constant at a value of TRUE
summary(lmModel)

Call:
lm(formula = .outcome ~ ., data = dat)

Residuals:
     Min       1Q   Median       3Q      Max 
-0.77857 -0.05514  0.00892  0.06598  0.46663 

Coefficients: (2 not defined because of singularities)
                                                 Estimate Std. Error t value Pr(>|t|)    
(Intercept)                                     7.421e+00  1.191e-02 623.323  < 2e-16 ***
Body.Typeobese                                  1.878e-01  3.817e-03  49.202  < 2e-16 ***
Body.Typeoverweight                             9.329e-02  3.845e-03  24.262  < 2e-16 ***
Body.Typeunderweight                           -4.985e-02  3.810e-03 -13.084  < 2e-16 ***
Sexmale                                         1.515e-01  2.707e-03  55.959  < 2e-16 ***
Dietpescatarian                                -4.321e-02  3.796e-03 -11.382  < 2e-16 ***
Dietvegan                                      -7.942e-02  3.830e-03 -20.737  < 2e-16 ***
Dietvegetarian                                 -6.956e-02  3.845e-03 -18.091  < 2e-16 ***
`How.Often.Showerless frequently`              -1.128e-02  3.794e-03  -2.972  0.00296 ** 
`How.Often.Showermore frequently`               1.626e-02  3.821e-03   4.255 2.11e-05 ***
`How.Often.Showertwice a day`                   9.034e-03  3.811e-03   2.371  0.01778 *  
Heating.Energy.Sourceelectricity               -2.227e-01  3.800e-03 -58.619  < 2e-16 ***
`Heating.Energy.Sourcenatural gas`             -9.763e-02  3.845e-03 -25.392  < 2e-16 ***
Heating.Energy.Sourcewood                      -9.692e-02  3.830e-03 -25.306  < 2e-16 ***
Transportpublic                                -1.955e-01  6.729e-03 -29.057  < 2e-16 ***
`Transportwalk/bicycle`                        -1.719e-01  7.130e-03 -24.114  < 2e-16 ***
Vehicle.Typeelectric                           -5.084e-01  7.470e-03 -68.059  < 2e-16 ***
Vehicle.TypeFuelEfficient                              NA         NA      NA       NA    
Vehicle.Typehybrid                             -1.313e-01  7.552e-03 -17.386  < 2e-16 ***
Vehicle.Typelpg                                 4.272e-02  7.481e-03   5.710 1.17e-08 ***
Vehicle.Typepetrol                              2.001e-01  7.577e-03  26.412  < 2e-16 ***
Social.Activityoften                            8.611e-02  3.296e-03  26.128  < 2e-16 ***
Social.Activitysometimes                        4.219e-02  3.311e-03  12.744  < 2e-16 ***
Monthly.Grocery.Bill                            4.704e-04  1.874e-05  25.103  < 2e-16 ***
Frequency.of.Traveling.by.Airnever             -3.609e-01  3.835e-03 -94.111  < 2e-16 ***
Frequency.of.Traveling.by.Airrarely            -2.434e-01  3.829e-03 -63.570  < 2e-16 ***
`Frequency.of.Traveling.by.Airvery frequently`  2.669e-01  3.810e-03  70.052  < 2e-16 ***
Vehicle.Monthly.Distance.Km                     6.660e-05  8.021e-07  83.036  < 2e-16 ***
Waste.Bag.Sizelarge                            -6.449e-02  3.816e-03 -16.898  < 2e-16 ***
Waste.Bag.Sizemedium                           -1.297e-01  3.841e-03 -33.763  < 2e-16 ***
Waste.Bag.Sizesmall                            -2.006e-01  3.828e-03 -52.405  < 2e-16 ***
Waste.Bag.Weekly.Count                          4.115e-02  6.792e-04  60.592  < 2e-16 ***
How.Long.TV.PC.Daily.Hour                       1.192e-03  1.900e-04   6.272 3.74e-10 ***
How.Many.New.Clothes.Monthly                    7.133e-03  9.174e-05  77.746  < 2e-16 ***
How.Long.Internet.Daily.Hour                    4.046e-03  1.853e-04  21.829  < 2e-16 ***
Energy.efficiencySometimes                     -1.997e-02  3.304e-03  -6.043 1.58e-09 ***
Energy.efficiencyYes                           -3.255e-02  3.353e-03  -9.708  < 2e-16 ***
Metal                                          -7.208e-02  2.839e-03 -25.394  < 2e-16 ***
Paper                                          -7.743e-02  2.835e-03 -27.309  < 2e-16 ***
Plastic                                        -3.004e-02  2.834e-03 -10.602  < 2e-16 ***
Glass                                          -5.130e-02  2.831e-03 -18.117  < 2e-16 ***
No_Recycling                                   -1.334e-02  6.409e-03  -2.081  0.03749 *  
Stove                                           1.266e-02  2.825e-03   4.482 7.49e-06 ***
Oven                                            1.488e-02  2.829e-03   5.260 1.47e-07 ***
Microwave                                       5.386e-03  2.822e-03   1.909  0.05632 .  
Grill                                           1.820e-02  2.817e-03   6.458 1.12e-10 ***
Airfryer                                               NA         NA      NA       NA    
No_Cooking                                     -1.236e-02  6.608e-03  -1.870  0.06150 .  
---
Signif. codes:  0 ‘***’ 0.001 ‘**’ 0.01 ‘*’ 0.05 ‘.’ 0.1 ‘ ’ 1

Residual standard error: 0.1207 on 7955 degrees of freedom
Multiple R-squared:  0.9256,    Adjusted R-squared:  0.9252 
F-statistic:  2199 on 45 and 7955 DF,  p-value: < 2.2e-16
stepwiseModel<-train(CarbonEmission~.,data = carbonTrainData, method="leapBackward", trControl=trainControl(method = "cv", number=5))
Warning: 2  linear dependencies found
Reordering variables and trying again:
Warning: 2  linear dependencies found
Reordering variables and trying again:
Warning: 2  linear dependencies found
Reordering variables and trying again:
Warning: 2  linear dependencies found
Reordering variables and trying again:
Warning: 2  linear dependencies found
Reordering variables and trying again:
Warning: 2  linear dependencies found
Reordering variables and trying again:
stepwiseModel
Linear Regression with Backwards Selection 

8001 samples
  28 predictor

No pre-processing
Resampling: Cross-Validated (5 fold) 
Summary of sample sizes: 6401, 6400, 6402, 6402, 6399 
Resampling results across tuning parameters:

  nvmax  RMSE       Rsquared   MAE      
  2      0.3782825  0.2651774  0.3006870
  3      0.3607228  0.3315503  0.2868409
  4      0.3471377  0.3809492  0.2731293

RMSE was used to select the optimal model using the smallest value.
The final value used for the model was nvmax = 4.
summary(stepwiseModel$finalModel)
Subset selection object
47 Variables  (and intercept)
                                             Forced in Forced out
Body.Typeobese                                   FALSE      FALSE
Body.Typeoverweight                              FALSE      FALSE
Body.Typeunderweight                             FALSE      FALSE
Sexmale                                          FALSE      FALSE
Dietpescatarian                                  FALSE      FALSE
Dietvegan                                        FALSE      FALSE
Dietvegetarian                                   FALSE      FALSE
How.Often.Showerless frequently                  FALSE      FALSE
How.Often.Showermore frequently                  FALSE      FALSE
How.Often.Showertwice a day                      FALSE      FALSE
Heating.Energy.Sourceelectricity                 FALSE      FALSE
Heating.Energy.Sourcenatural gas                 FALSE      FALSE
Heating.Energy.Sourcewood                        FALSE      FALSE
Transportpublic                                  FALSE      FALSE
Transportwalk/bicycle                            FALSE      FALSE
Vehicle.Typeelectric                             FALSE      FALSE
Vehicle.Typehybrid                               FALSE      FALSE
Vehicle.Typelpg                                  FALSE      FALSE
Vehicle.Typepetrol                               FALSE      FALSE
Social.Activityoften                             FALSE      FALSE
Social.Activitysometimes                         FALSE      FALSE
Monthly.Grocery.Bill                             FALSE      FALSE
Frequency.of.Traveling.by.Airnever               FALSE      FALSE
Frequency.of.Traveling.by.Airrarely              FALSE      FALSE
Frequency.of.Traveling.by.Airvery frequently     FALSE      FALSE
Vehicle.Monthly.Distance.Km                      FALSE      FALSE
Waste.Bag.Sizelarge                              FALSE      FALSE
Waste.Bag.Sizemedium                             FALSE      FALSE
Waste.Bag.Sizesmall                              FALSE      FALSE
Waste.Bag.Weekly.Count                           FALSE      FALSE
How.Long.TV.PC.Daily.Hour                        FALSE      FALSE
How.Many.New.Clothes.Monthly                     FALSE      FALSE
How.Long.Internet.Daily.Hour                     FALSE      FALSE
Energy.efficiencySometimes                       FALSE      FALSE
Energy.efficiencyYes                             FALSE      FALSE
Metal                                            FALSE      FALSE
Paper                                            FALSE      FALSE
Plastic                                          FALSE      FALSE
Glass                                            FALSE      FALSE
No_Recycling                                     FALSE      FALSE
Stove                                            FALSE      FALSE
Oven                                             FALSE      FALSE
Microwave                                        FALSE      FALSE
Grill                                            FALSE      FALSE
No_Cooking                                       FALSE      FALSE
Vehicle.TypeFuelEfficient                        FALSE      FALSE
Airfryer                                         FALSE      FALSE
1 subsets of each size up to 5
Selection Algorithm: backward
         Body.Typeobese Body.Typeoverweight Body.Typeunderweight Sexmale Dietpescatarian Dietvegan Dietvegetarian How.Often.Showerless frequently
1  ( 1 ) " "            " "                 " "                  " "     " "             " "       " "            " "                            
2  ( 1 ) " "            " "                 " "                  " "     " "             " "       " "            " "                            
3  ( 1 ) " "            " "                 " "                  " "     " "             " "       " "            " "                            
4  ( 1 ) " "            " "                 " "                  " "     " "             " "       " "            " "                            
5  ( 1 ) " "            " "                 " "                  " "     " "             " "       " "            " "                            
         How.Often.Showermore frequently How.Often.Showertwice a day Heating.Energy.Sourceelectricity Heating.Energy.Sourcenatural gas
1  ( 1 ) " "                             " "                         " "                              " "                             
2  ( 1 ) " "                             " "                         " "                              " "                             
3  ( 1 ) " "                             " "                         " "                              " "                             
4  ( 1 ) " "                             " "                         " "                              " "                             
5  ( 1 ) " "                             " "                         " "                              " "                             
         Heating.Energy.Sourcewood Transportpublic Transportwalk/bicycle Vehicle.Typeelectric Vehicle.TypeFuelEfficient Vehicle.Typehybrid Vehicle.Typelpg
1  ( 1 ) " "                       " "             " "                   " "                  " "                       " "                " "            
2  ( 1 ) " "                       " "             " "                   " "                  " "                       " "                " "            
3  ( 1 ) " "                       " "             " "                   "*"                  " "                       " "                " "            
4  ( 1 ) " "                       " "             " "                   "*"                  " "                       " "                " "            
5  ( 1 ) " "                       " "             " "                   "*"                  " "                       " "                " "            
         Vehicle.Typepetrol Social.Activityoften Social.Activitysometimes Monthly.Grocery.Bill Frequency.of.Traveling.by.Airnever
1  ( 1 ) " "                " "                  " "                      " "                  " "                               
2  ( 1 ) " "                " "                  " "                      " "                  " "                               
3  ( 1 ) " "                " "                  " "                      " "                  " "                               
4  ( 1 ) " "                " "                  " "                      " "                  " "                               
5  ( 1 ) " "                " "                  " "                      " "                  "*"                               
         Frequency.of.Traveling.by.Airrarely Frequency.of.Traveling.by.Airvery frequently Vehicle.Monthly.Distance.Km Waste.Bag.Sizelarge
1  ( 1 ) " "                                 " "                                          "*"                         " "                
2  ( 1 ) " "                                 "*"                                          "*"                         " "                
3  ( 1 ) " "                                 "*"                                          "*"                         " "                
4  ( 1 ) " "                                 "*"                                          "*"                         " "                
5  ( 1 ) " "                                 "*"                                          "*"                         " "                
         Waste.Bag.Sizemedium Waste.Bag.Sizesmall Waste.Bag.Weekly.Count How.Long.TV.PC.Daily.Hour How.Many.New.Clothes.Monthly How.Long.Internet.Daily.Hour
1  ( 1 ) " "                  " "                 " "                    " "                       " "                          " "                         
2  ( 1 ) " "                  " "                 " "                    " "                       " "                          " "                         
3  ( 1 ) " "                  " "                 " "                    " "                       " "                          " "                         
4  ( 1 ) " "                  " "                 " "                    " "                       "*"                          " "                         
5  ( 1 ) " "                  " "                 " "                    " "                       "*"                          " "                         
         Energy.efficiencySometimes Energy.efficiencyYes Metal Paper Plastic Glass No_Recycling Stove Oven Microwave Grill Airfryer No_Cooking
1  ( 1 ) " "                        " "                  " "   " "   " "     " "   " "          " "   " "  " "       " "   " "      " "       
2  ( 1 ) " "                        " "                  " "   " "   " "     " "   " "          " "   " "  " "       " "   " "      " "       
3  ( 1 ) " "                        " "                  " "   " "   " "     " "   " "          " "   " "  " "       " "   " "      " "       
4  ( 1 ) " "                        " "                  " "   " "   " "     " "   " "          " "   " "  " "       " "   " "      " "       
5  ( 1 ) " "                        " "                  " "   " "   " "     " "   " "          " "   " "  " "       " "   " "      " "       

#Lasso Model

library(glmnet)
set.seed(1)
lassoModel<-train(CarbonEmission~.,data = carbonTrainData,method="glmnet",trControl= trainControl(method = "cv", number=5), tuneGrid = expand.grid(alpha=1, lambda=10^seq(-3,3,length=100))) 
Warning: There were missing values in resampled performance measures.
lassoModel
glmnet 

8001 samples
  28 predictor

No pre-processing
Resampling: Cross-Validated (5 fold) 
Summary of sample sizes: 6400, 6402, 6400, 6401, 6401 
Resampling results across tuning parameters:

  lambda        RMSE       Rsquared   MAE       
  1.000000e-03  0.1214973  0.9245000  0.08687629
  1.149757e-03  0.1215854  0.9244221  0.08694724
  1.321941e-03  0.1217004  0.9243204  0.08703862
  1.519911e-03  0.1218552  0.9241830  0.08716368
  1.747528e-03  0.1220575  0.9240036  0.08732665
  2.009233e-03  0.1223224  0.9237676  0.08754325
  2.310130e-03  0.1226701  0.9234560  0.08783140
  2.656088e-03  0.1231219  0.9230482  0.08820983
  3.053856e-03  0.1237161  0.9225053  0.08870755
  3.511192e-03  0.1244955  0.9217826  0.08936085
  4.037017e-03  0.1255163  0.9208163  0.09024974
  4.641589e-03  0.1268402  0.9195356  0.09140592
  5.336699e-03  0.1285144  0.9178867  0.09289531
  6.135907e-03  0.1306158  0.9157707  0.09473980
  7.054802e-03  0.1331601  0.9131792  0.09695460
  8.111308e-03  0.1363159  0.9098632  0.09969136
  9.326033e-03  0.1400410  0.9058695  0.10288492
  1.072267e-02  0.1440491  0.9016682  0.10627227
  1.232847e-02  0.1488871  0.8964030  0.11039431
  1.417474e-02  0.1542083  0.8906494  0.11481353
  1.629751e-02  0.1592735  0.8859180  0.11878771
  1.873817e-02  0.1653805  0.8799604  0.12357893
  2.154435e-02  0.1729973  0.8717956  0.12971927
  2.477076e-02  0.1824785  0.8603090  0.13741038
  2.848036e-02  0.1928623  0.8473535  0.14581335
  3.274549e-02  0.2039627  0.8334474  0.15485133
  3.764936e-02  0.2162439  0.8174807  0.16493342
  4.328761e-02  0.2298441  0.7988964  0.17609103
  4.977024e-02  0.2463958  0.7711065  0.18966157
  5.722368e-02  0.2665106  0.7278567  0.20621242
  6.579332e-02  0.2890668  0.6679143  0.22482408
  7.564633e-02  0.3108364  0.6023591  0.24281317
  8.697490e-02  0.3269388  0.5605917  0.25621639
  1.000000e-01  0.3417624  0.5270994  0.26858359
  1.149757e-01  0.3565354  0.4957222  0.28104235
  1.321941e-01  0.3710455  0.4761545  0.29352937
  1.519911e-01  0.3872305  0.4578507  0.30762038
  1.747528e-01  0.4061797  0.4320649  0.32419438
  2.009233e-01  0.4274621  0.3153036  0.34240298
  2.310130e-01  0.4411191        NaN  0.35246969
  2.656088e-01  0.4411191        NaN  0.35246969
  3.053856e-01  0.4411191        NaN  0.35246969
  3.511192e-01  0.4411191        NaN  0.35246969
  4.037017e-01  0.4411191        NaN  0.35246969
  4.641589e-01  0.4411191        NaN  0.35246969
  5.336699e-01  0.4411191        NaN  0.35246969
  6.135907e-01  0.4411191        NaN  0.35246969
  7.054802e-01  0.4411191        NaN  0.35246969
  8.111308e-01  0.4411191        NaN  0.35246969
  9.326033e-01  0.4411191        NaN  0.35246969
  1.072267e+00  0.4411191        NaN  0.35246969
  1.232847e+00  0.4411191        NaN  0.35246969
  1.417474e+00  0.4411191        NaN  0.35246969
  1.629751e+00  0.4411191        NaN  0.35246969
  1.873817e+00  0.4411191        NaN  0.35246969
  2.154435e+00  0.4411191        NaN  0.35246969
  2.477076e+00  0.4411191        NaN  0.35246969
  2.848036e+00  0.4411191        NaN  0.35246969
  3.274549e+00  0.4411191        NaN  0.35246969
  3.764936e+00  0.4411191        NaN  0.35246969
  4.328761e+00  0.4411191        NaN  0.35246969
  4.977024e+00  0.4411191        NaN  0.35246969
  5.722368e+00  0.4411191        NaN  0.35246969
  6.579332e+00  0.4411191        NaN  0.35246969
  7.564633e+00  0.4411191        NaN  0.35246969
  8.697490e+00  0.4411191        NaN  0.35246969
  1.000000e+01  0.4411191        NaN  0.35246969
  1.149757e+01  0.4411191        NaN  0.35246969
  1.321941e+01  0.4411191        NaN  0.35246969
  1.519911e+01  0.4411191        NaN  0.35246969
  1.747528e+01  0.4411191        NaN  0.35246969
  2.009233e+01  0.4411191        NaN  0.35246969
  2.310130e+01  0.4411191        NaN  0.35246969
  2.656088e+01  0.4411191        NaN  0.35246969
  3.053856e+01  0.4411191        NaN  0.35246969
  3.511192e+01  0.4411191        NaN  0.35246969
  4.037017e+01  0.4411191        NaN  0.35246969
  4.641589e+01  0.4411191        NaN  0.35246969
  5.336699e+01  0.4411191        NaN  0.35246969
  6.135907e+01  0.4411191        NaN  0.35246969
  7.054802e+01  0.4411191        NaN  0.35246969
  8.111308e+01  0.4411191        NaN  0.35246969
  9.326033e+01  0.4411191        NaN  0.35246969
  1.072267e+02  0.4411191        NaN  0.35246969
  1.232847e+02  0.4411191        NaN  0.35246969
  1.417474e+02  0.4411191        NaN  0.35246969
  1.629751e+02  0.4411191        NaN  0.35246969
  1.873817e+02  0.4411191        NaN  0.35246969
  2.154435e+02  0.4411191        NaN  0.35246969
  2.477076e+02  0.4411191        NaN  0.35246969
  2.848036e+02  0.4411191        NaN  0.35246969
  3.274549e+02  0.4411191        NaN  0.35246969
  3.764936e+02  0.4411191        NaN  0.35246969
  4.328761e+02  0.4411191        NaN  0.35246969
  4.977024e+02  0.4411191        NaN  0.35246969
  5.722368e+02  0.4411191        NaN  0.35246969
  6.579332e+02  0.4411191        NaN  0.35246969
  7.564633e+02  0.4411191        NaN  0.35246969
  8.697490e+02  0.4411191        NaN  0.35246969
  1.000000e+03  0.4411191        NaN  0.35246969

Tuning parameter 'alpha' was held constant at a value of 1
RMSE was used to select the optimal model using the smallest value.
The final values used for the model were alpha = 1 and lambda = 0.001.
lassoLambda<-lassoModel$bestTune$lambda
lassoPredictor<- setdiff(names(carbonTrainData),"CarbonEmission")
lassoFinalModel<-glmnet(as.matrix(carbonTrainData[,lassoPredictor]),carbonTrainData[,"CarbonEmission"],alpha = 1,lambda = lassoLambda, family = "gaussian")
Warning: NAs introduced by coercion
coeff<-coef(lassoFinalModel)
coeff
29 x 1 sparse Matrix of class "dgCMatrix"
                                         s0
(Intercept)                    7.077965e+00
Body.Type                      .           
Sex                            .           
Diet                           .           
How.Often.Shower               .           
Heating.Energy.Source          .           
Transport                      .           
Vehicle.Type                   .           
Social.Activity                .           
Monthly.Grocery.Bill           4.990372e-04
Frequency.of.Traveling.by.Air  .           
Vehicle.Monthly.Distance.Km    7.930046e-05
Waste.Bag.Size                 .           
Waste.Bag.Weekly.Count         3.966627e-02
How.Long.TV.PC.Daily.Hour      6.828507e-04
How.Many.New.Clothes.Monthly   6.954544e-03
How.Long.Internet.Daily.Hour   3.466329e-03
Energy.efficiency              .           
Metal                         -6.157385e-02
Paper                         -7.169971e-02
Plastic                       -3.204086e-02
Glass                         -3.772139e-02
No_Recycling                   1.536001e-02
Stove                          7.934277e-03
Oven                           2.805123e-02
Microwave                      7.754147e-04
Grill                          7.365362e-03
Airfryer                       6.722616e-16
No_Cooking                    -1.394166e-02
zeroCoeff<-coeff==0
zeroCoeff
29 x 1 Matrix of class "lgeMatrix"
                                 s0
(Intercept)                   FALSE
Body.Type                      TRUE
Sex                            TRUE
Diet                           TRUE
How.Often.Shower               TRUE
Heating.Energy.Source          TRUE
Transport                      TRUE
Vehicle.Type                   TRUE
Social.Activity                TRUE
Monthly.Grocery.Bill          FALSE
Frequency.of.Traveling.by.Air  TRUE
Vehicle.Monthly.Distance.Km   FALSE
Waste.Bag.Size                 TRUE
Waste.Bag.Weekly.Count        FALSE
How.Long.TV.PC.Daily.Hour     FALSE
How.Many.New.Clothes.Monthly  FALSE
How.Long.Internet.Daily.Hour  FALSE
Energy.efficiency              TRUE
Metal                         FALSE
Paper                         FALSE
Plastic                       FALSE
Glass                         FALSE
No_Recycling                  FALSE
Stove                         FALSE
Oven                          FALSE
Microwave                     FALSE
Grill                         FALSE
Airfryer                      FALSE
No_Cooking                    FALSE
plot(lassoModel)

#Ridge Model

set.seed(1)
ridgeModel<-train(CarbonEmission~.,data = carbonTrainData,method="glmnet",trControl= trainControl(method = "cv", number=5), tuneGrid = expand.grid(alpha=0, lambda=10^seq(-3,3,length=100))) 
Warning: There were missing values in resampled performance measures.
ridgeModel
glmnet 

8001 samples
  28 predictor

No pre-processing
Resampling: Cross-Validated (5 fold) 
Summary of sample sizes: 6400, 6402, 6400, 6401, 6401 
Resampling results across tuning parameters:

  lambda        RMSE       Rsquared   MAE       
  1.000000e-03  0.1236296  0.9235380  0.08856450
  1.149757e-03  0.1236296  0.9235380  0.08856450
  1.321941e-03  0.1236296  0.9235380  0.08856450
  1.519911e-03  0.1236296  0.9235380  0.08856450
  1.747528e-03  0.1236296  0.9235380  0.08856450
  2.009233e-03  0.1236296  0.9235380  0.08856450
  2.310130e-03  0.1236296  0.9235380  0.08856450
  2.656088e-03  0.1236296  0.9235380  0.08856450
  3.053856e-03  0.1236296  0.9235380  0.08856450
  3.511192e-03  0.1236296  0.9235380  0.08856450
  4.037017e-03  0.1236296  0.9235380  0.08856450
  4.641589e-03  0.1236296  0.9235380  0.08856450
  5.336699e-03  0.1236296  0.9235380  0.08856450
  6.135907e-03  0.1236296  0.9235380  0.08856450
  7.054802e-03  0.1236296  0.9235380  0.08856450
  8.111308e-03  0.1236296  0.9235380  0.08856450
  9.326033e-03  0.1236296  0.9235380  0.08856450
  1.072267e-02  0.1236296  0.9235380  0.08856450
  1.232847e-02  0.1236296  0.9235380  0.08856450
  1.417474e-02  0.1236296  0.9235380  0.08856450
  1.629751e-02  0.1236296  0.9235380  0.08856450
  1.873817e-02  0.1236296  0.9235380  0.08856450
  2.154435e-02  0.1236296  0.9235380  0.08856450
  2.477076e-02  0.1240623  0.9233222  0.08891353
  2.848036e-02  0.1248547  0.9229254  0.08955337
  3.274549e-02  0.1258460  0.9224294  0.09035832
  3.764936e-02  0.1270691  0.9218201  0.09134409
  4.328761e-02  0.1285767  0.9210687  0.09255793
  4.977024e-02  0.1304045  0.9201612  0.09404563
  5.722368e-02  0.1326181  0.9190609  0.09587905
  6.579332e-02  0.1352510  0.9177554  0.09805509
  7.564633e-02  0.1383809  0.9161965  0.10064929
  8.697490e-02  0.1420236  0.9143856  0.10368213
  1.000000e-01  0.1462655  0.9122644  0.10725472
  1.149757e-01  0.1510922  0.9098562  0.11134292
  1.321941e-01  0.1566022  0.9070774  0.11604694
  1.519911e-01  0.1627365  0.9039836  0.12127662
  1.747528e-01  0.1695983  0.9004756  0.12712566
  2.009233e-01  0.1770753  0.8966475  0.13355774
  2.310130e-01  0.1852805  0.8923696  0.14060318
  2.656088e-01  0.1940398  0.8877770  0.14808518
  3.053856e-01  0.2034777  0.8827064  0.15611211
  3.511192e-01  0.2133536  0.8773463  0.16452594
  4.037017e-01  0.2238071  0.8714935  0.17339893
  4.641589e-01  0.2345317  0.8653986  0.18248118
  5.336699e-01  0.2456875  0.8588186  0.19188157
  6.135907e-01  0.2569113  0.8520765  0.20127888
  7.054802e-01  0.2683892  0.8448914  0.21087395
  8.111308e-01  0.2797186  0.8376721  0.22034450
  9.326033e-01  0.2911145  0.8300828  0.22982984
  1.072267e+00  0.3021559  0.8226070  0.23897247
  1.232847e+00  0.3130907  0.8148905  0.24801865
  1.417474e+00  0.3235011  0.8074583  0.25660788
  1.629751e+00  0.3336608  0.7999214  0.26496175
  1.873817e+00  0.3431761  0.7928249  0.27275909
  2.154435e+00  0.3523368  0.7857526  0.28027521
  2.477076e+00  0.3607877  0.7792359  0.28719687
  2.848036e+00  0.3688233  0.7728451  0.29377210
  3.274549e+00  0.3761349  0.7670706  0.29974973
  3.764936e+00  0.3830101  0.7614879  0.30535400
  4.328761e+00  0.3891895  0.7565288  0.31038294
  4.977024e+00  0.3949431  0.7517928  0.31506421
  5.722368e+00  0.4000593  0.7476450  0.31922348
  6.579332e+00  0.4047823  0.7437226  0.32305636
  7.564633e+00  0.4089439  0.7403284  0.32642816
  8.697490e+00  0.4127580  0.7371445  0.32952218
  1.000000e+01  0.4160930  0.7344155  0.33223076
  1.149757e+01  0.4191313  0.7318718  0.33469680
  1.321941e+01  0.4217713  0.7297079  0.33683915
  1.519911e+01  0.4241648  0.7277011  0.33878063
  1.747528e+01  0.4262338  0.7260039  0.34045671
  2.009233e+01  0.4281024  0.7244361  0.34196880
  2.310130e+01  0.4297111  0.7231162  0.34326913
  2.656088e+01  0.4311594  0.7219006  0.34443845
  3.053856e+01  0.4324024  0.7208809  0.34544178
  3.511192e+01  0.4335188  0.7199439  0.34634248
  4.037017e+01  0.4344745  0.7191601  0.34711333
  4.641589e+01  0.4353311  0.7184411  0.34780407
  5.336699e+01  0.4360631  0.7178408  0.34839424
  6.135907e+01  0.4367183  0.7172914  0.34892252
  7.054802e+01  0.4372773  0.7168332  0.34937326
  8.111308e+01  0.4377771  0.7164137  0.34977623
  9.326033e+01  0.4382030  0.7160645  0.35011968
  1.072267e+02  0.4385835  0.7157452  0.35042640
  1.232847e+02  0.4389075  0.7154796  0.35068750
  1.417474e+02  0.4391967  0.7152369  0.35092063
  1.629751e+02  0.4394428  0.7150352  0.35111896
  1.873817e+02  0.4396624  0.7148510  0.35129595
  2.154435e+02  0.4404229  0.7147417  0.35190859
  2.477076e+02  0.4411191        NaN  0.35246969
  2.848036e+02  0.4411191        NaN  0.35246969
  3.274549e+02  0.4411191        NaN  0.35246969
  3.764936e+02  0.4411191        NaN  0.35246969
  4.328761e+02  0.4411191        NaN  0.35246969
  4.977024e+02  0.4411191        NaN  0.35246969
  5.722368e+02  0.4411191        NaN  0.35246969
  6.579332e+02  0.4411191        NaN  0.35246969
  7.564633e+02  0.4411191        NaN  0.35246969
  8.697490e+02  0.4411191        NaN  0.35246969
  1.000000e+03  0.4411191        NaN  0.35246969

Tuning parameter 'alpha' was held constant at a value of 0
RMSE was used to select the optimal model using the smallest value.
The final values used for the model were alpha = 0 and lambda = 0.02154435.
ridgeLambda<-ridgeModel$bestTune$lambda
ridgePredictor<- setdiff(names(carbonTrainData),"CarbonEmission")
ridgeFinalModel<-glmnet(as.matrix(carbonTrainData[,ridgePredictor]),carbonTrainData[,"CarbonEmission"],alpha = 1,lambda = ridgeLambda, family = "gaussian")
Warning: NAs introduced by coercion
ridgeFinalModel

Call:  glmnet(x = as.matrix(carbonTrainData[, ridgePredictor]), y = carbonTrainData[,      "CarbonEmission"], family = "gaussian", alpha = 1, lambda = ridgeLambda) 

  Df  %Dev  Lambda
1  8 35.92 0.02154
plot(ridgeModel)

#Elastic Net Model

set.seed(1)
enetModel<-train(CarbonEmission~., data = carbonTrainData, method = "glmnet", trControl=trainControl(method="cv",number=5,preProc="nzv"),tuneGrid=expand.grid(alpha=seq(0,1,length=10),lambda=10^seq(-3,1,length=100)))
Warning: There were missing values in resampled performance measures.
enetModel
glmnet 

8001 samples
  28 predictor

No pre-processing
Resampling: Cross-Validated (5 fold) 
Summary of sample sizes: 6400, 6402, 6400, 6401, 6401 
Resampling results across tuning parameters:

  alpha      lambda        RMSE       Rsquared   MAE       
  0.0000000   0.001000000  0.1236296  0.9235380  0.08856450
  0.0000000   0.001097499  0.1236296  0.9235380  0.08856450
  0.0000000   0.001204504  0.1236296  0.9235380  0.08856450
  0.0000000   0.001321941  0.1236296  0.9235380  0.08856450
  0.0000000   0.001450829  0.1236296  0.9235380  0.08856450
  0.0000000   0.001592283  0.1236296  0.9235380  0.08856450
  0.0000000   0.001747528  0.1236296  0.9235380  0.08856450
  0.0000000   0.001917910  0.1236296  0.9235380  0.08856450
  0.0000000   0.002104904  0.1236296  0.9235380  0.08856450
  0.0000000   0.002310130  0.1236296  0.9235380  0.08856450
  0.0000000   0.002535364  0.1236296  0.9235380  0.08856450
  0.0000000   0.002782559  0.1236296  0.9235380  0.08856450
  0.0000000   0.003053856  0.1236296  0.9235380  0.08856450
  0.0000000   0.003351603  0.1236296  0.9235380  0.08856450
  0.0000000   0.003678380  0.1236296  0.9235380  0.08856450
  0.0000000   0.004037017  0.1236296  0.9235380  0.08856450
  0.0000000   0.004430621  0.1236296  0.9235380  0.08856450
  0.0000000   0.004862602  0.1236296  0.9235380  0.08856450
  0.0000000   0.005336699  0.1236296  0.9235380  0.08856450
  0.0000000   0.005857021  0.1236296  0.9235380  0.08856450
  0.0000000   0.006428073  0.1236296  0.9235380  0.08856450
  0.0000000   0.007054802  0.1236296  0.9235380  0.08856450
  0.0000000   0.007742637  0.1236296  0.9235380  0.08856450
  0.0000000   0.008497534  0.1236296  0.9235380  0.08856450
  0.0000000   0.009326033  0.1236296  0.9235380  0.08856450
  0.0000000   0.010235310  0.1236296  0.9235380  0.08856450
  0.0000000   0.011233240  0.1236296  0.9235380  0.08856450
  0.0000000   0.012328467  0.1236296  0.9235380  0.08856450
  0.0000000   0.013530478  0.1236296  0.9235380  0.08856450
  0.0000000   0.014849683  0.1236296  0.9235380  0.08856450
  0.0000000   0.016297508  0.1236296  0.9235380  0.08856450
  0.0000000   0.017886495  0.1236296  0.9235380  0.08856450
  0.0000000   0.019630407  0.1236296  0.9235380  0.08856450
  0.0000000   0.021544347  0.1236296  0.9235380  0.08856450
  0.0000000   0.023644894  0.1238344  0.9234369  0.08873071
  0.0000000   0.025950242  0.1243062  0.9232001  0.08910784
  0.0000000   0.028480359  0.1248547  0.9229254  0.08955337
  0.0000000   0.031257158  0.1254902  0.9226076  0.09007142
  0.0000000   0.034304693  0.1262242  0.9222412  0.09066357
  0.0000000   0.037649358  0.1270691  0.9218201  0.09134409
  0.0000000   0.041320124  0.1280380  0.9213377  0.09212398
  0.0000000   0.045348785  0.1291449  0.9207872  0.09301807
  0.0000000   0.049770236  0.1304045  0.9201612  0.09404563
  0.0000000   0.054622772  0.1318313  0.9194531  0.09522700
  0.0000000   0.059948425  0.1334415  0.9186540  0.09656021
  0.0000000   0.065793322  0.1352510  0.9177554  0.09805509
  0.0000000   0.072208090  0.1372745  0.9167500  0.09973249
  0.0000000   0.079248290  0.1395276  0.9156288  0.10160035
  0.0000000   0.086974900  0.1420236  0.9143856  0.10368213
  0.0000000   0.095454846  0.1447753  0.9130136  0.10599583
  0.0000000   0.104761575  0.1477942  0.9115069  0.10854740
  0.0000000   0.114975700  0.1510922  0.9098562  0.11134292
  0.0000000   0.126185688  0.1546780  0.9080552  0.11439741
  0.0000000   0.138488637  0.1585579  0.9060991  0.11771529
  0.0000000   0.151991108  0.1627365  0.9039836  0.12127662
  0.0000000   0.166810054  0.1672161  0.9017050  0.12509205
  0.0000000   0.183073828  0.1719965  0.8992602  0.12918270
  0.0000000   0.200923300  0.1770753  0.8966475  0.13355774
  0.0000000   0.220513074  0.1824476  0.8938641  0.13816952
  0.0000000   0.242012826  0.1881059  0.8909074  0.14302527
  0.0000000   0.265608778  0.1940398  0.8877770  0.14808518
  0.0000000   0.291505306  0.2002365  0.8844729  0.15335458
  0.0000000   0.319926714  0.2066805  0.8809955  0.15884261
  0.0000000   0.351119173  0.2133536  0.8773463  0.16452594
  0.0000000   0.385352859  0.2202354  0.8735277  0.17037196
  0.0000000   0.422924287  0.2273031  0.8695434  0.17636285
  0.0000000   0.464158883  0.2345317  0.8653986  0.18248118
  0.0000000   0.509413801  0.2418949  0.8610998  0.18869095
  0.0000000   0.559081018  0.2493644  0.8566556  0.19496905
  0.0000000   0.613590727  0.2569113  0.8520765  0.20127888
  0.0000000   0.673415066  0.2645058  0.8473748  0.20762461
  0.0000000   0.739072203  0.2721184  0.8425692  0.21399504
  0.0000000   0.811130831  0.2797186  0.8376721  0.22034450
  0.0000000   0.890215085  0.2872768  0.8326965  0.22664091
  0.0000000   0.977009957  0.2947649  0.8276676  0.23285533
  0.0000000   1.072267222  0.3021559  0.8226070  0.23897247
  0.0000000   1.176811952  0.3094244  0.8175368  0.24498596
  0.0000000   1.291549665  0.3165466  0.8124797  0.25087384
  0.0000000   1.417474163  0.3235011  0.8074583  0.25660788
  0.0000000   1.555676144  0.3302684  0.8024947  0.26217619
  0.0000000   1.707352647  0.3368316  0.7976103  0.26756072
  0.0000000   1.873817423  0.3431761  0.7928249  0.27275909
  0.0000000   2.056512308  0.3492896  0.7881569  0.27777537
  0.0000000   2.257019720  0.3551626  0.7836225  0.28259195
  0.0000000   2.477076356  0.3607877  0.7792359  0.28719687
  0.0000000   2.718588243  0.3661597  0.7750090  0.29159314
  0.0000000   2.983647240  0.3712757  0.7709515  0.29577793
  0.0000000   3.274549163  0.3761349  0.7670706  0.29974973
  0.0000000   3.593813664  0.3807383  0.7633715  0.30350446
  0.0000000   3.944206059  0.3850884  0.7598572  0.30704491
  0.0000000   4.328761281  0.3891895  0.7565288  0.31038294
  0.0000000   4.750810162  0.3930472  0.7533859  0.31352223
  0.0000000   5.214008288  0.3966679  0.7504259  0.31646616
  0.0000000   5.722367659  0.4000593  0.7476450  0.31922348
  0.0000000   6.280291442  0.4032297  0.7450389  0.32179736
  0.0000000   6.892612104  0.4061881  0.7426021  0.32419540
  0.0000000   7.564633276  0.4089439  0.7403284  0.32642816
  0.0000000   8.302175681  0.4115067  0.7382109  0.32850613
  0.0000000   9.111627561  0.4138865  0.7362424  0.33043876
  0.0000000  10.000000000  0.4160930  0.7344155  0.33223076
  0.1111111   0.001000000  0.1212244  0.9247599  0.08667484
  0.1111111   0.001097499  0.1212244  0.9247599  0.08667484
  0.1111111   0.001204504  0.1212244  0.9247599  0.08667484
  0.1111111   0.001321941  0.1212244  0.9247599  0.08667484
  0.1111111   0.001450829  0.1212253  0.9247594  0.08667531
  0.1111111   0.001592283  0.1212316  0.9247551  0.08667904
  0.1111111   0.001747528  0.1212391  0.9247500  0.08668362
  0.1111111   0.001917910  0.1212477  0.9247444  0.08668905
  0.1111111   0.002104904  0.1212576  0.9247380  0.08669519
  0.1111111   0.002310130  0.1212708  0.9247287  0.08670357
  0.1111111   0.002535364  0.1212851  0.9247200  0.08671278
  0.1111111   0.002782559  0.1213038  0.9247078  0.08672566
  0.1111111   0.003053856  0.1213264  0.9246932  0.08674138
  0.1111111   0.003351603  0.1213535  0.9246757  0.08676010
  0.1111111   0.003678380  0.1213861  0.9246547  0.08678325
  0.1111111   0.004037017  0.1214253  0.9246296  0.08681115
  0.1111111   0.004430621  0.1214723  0.9245995  0.08684541
  0.1111111   0.004862602  0.1215285  0.9245636  0.08688706
  0.1111111   0.005336699  0.1215955  0.9245211  0.08693708
  0.1111111   0.005857021  0.1216734  0.9244725  0.08699575
  0.1111111   0.006428073  0.1217640  0.9244167  0.08706176
  0.1111111   0.007054802  0.1218723  0.9243517  0.08714020
  0.1111111   0.007742637  0.1220023  0.9242730  0.08723349
  0.1111111   0.008497534  0.1221574  0.9241789  0.08734784
  0.1111111   0.009326033  0.1223418  0.9240671  0.08748788
  0.1111111   0.010235310  0.1225610  0.9239338  0.08765828
  0.1111111   0.011233240  0.1228206  0.9237757  0.08786128
  0.1111111   0.012328467  0.1231251  0.9235903  0.08810148
  0.1111111   0.013530478  0.1234858  0.9233706  0.08839310
  0.1111111   0.014849683  0.1239090  0.9231112  0.08873575
  0.1111111   0.016297508  0.1244137  0.9227994  0.08913617
  0.1111111   0.017886495  0.1250059  0.9224297  0.08961301
  0.1111111   0.019630407  0.1256982  0.9219954  0.09017614
  0.1111111   0.021544347  0.1265143  0.9214749  0.09085306
  0.1111111   0.023644894  0.1274679  0.9208584  0.09165533
  0.1111111   0.025950242  0.1285776  0.9201331  0.09258548
  0.1111111   0.028480359  0.1298571  0.9192845  0.09366354
  0.1111111   0.031257158  0.1313419  0.9182803  0.09491783
  0.1111111   0.034304693  0.1330534  0.9170988  0.09637225
  0.1111111   0.037649358  0.1350118  0.9157160  0.09803835
  0.1111111   0.041320124  0.1372378  0.9141128  0.09990429
  0.1111111   0.045348785  0.1397448  0.9122722  0.10199035
  0.1111111   0.049770236  0.1425567  0.9101670  0.10431658
  0.1111111   0.054622772  0.1456757  0.9077977  0.10690385
  0.1111111   0.059948425  0.1490664  0.9052449  0.10972602
  0.1111111   0.065793322  0.1526953  0.9025983  0.11274363
  0.1111111   0.072208090  0.1565559  0.8999013  0.11596297
  0.1111111   0.079248290  0.1606923  0.8971276  0.11941841
  0.1111111   0.086974900  0.1652388  0.8939947  0.12322742
  0.1111111   0.095454846  0.1702413  0.8904195  0.12746901
  0.1111111   0.104761575  0.1756154  0.8865688  0.13200873
  0.1111111   0.114975700  0.1812278  0.8828346  0.13672245
  0.1111111   0.126185688  0.1872798  0.8787716  0.14179500
  0.1111111   0.138488637  0.1937847  0.8743191  0.14724908
  0.1111111   0.151991108  0.2007984  0.8692493  0.15313792
  0.1111111   0.166810054  0.2083792  0.8633033  0.15946987
  0.1111111   0.183073828  0.2165454  0.8562948  0.16629903
  0.1111111   0.200923300  0.2253000  0.8479675  0.17364441
  0.1111111   0.220513074  0.2344633  0.8387523  0.18128129
  0.1111111   0.242012826  0.2438032  0.8294846  0.18904129
  0.1111111   0.265608778  0.2535871  0.8188895  0.19715695
  0.1111111   0.291505306  0.2636993  0.8071145  0.20551204
  0.1111111   0.319926714  0.2741153  0.7940282  0.21409987
  0.1111111   0.351119173  0.2847082  0.7799562  0.22286315
  0.1111111   0.385352859  0.2954869  0.7645372  0.23179262
  0.1111111   0.422924287  0.3066095  0.7461381  0.24098325
  0.1111111   0.464158883  0.3181258  0.7231542  0.25046063
  0.1111111   0.509413801  0.3299815  0.6942349  0.26020522
  0.1111111   0.559081018  0.3419844  0.6587992  0.27006763
  0.1111111   0.613590727  0.3534831  0.6214239  0.27955448
  0.1111111   0.673415066  0.3639929  0.5889376  0.28828655
  0.1111111   0.739072203  0.3733378  0.5641600  0.29609432
  0.1111111   0.811130831  0.3815679  0.5498502  0.30290536
  0.1111111   0.890215085  0.3896798  0.5334248  0.30961589
  0.1111111   0.977009957  0.3976221  0.5136393  0.31618662
  0.1111111   1.072267222  0.4048771  0.5025919  0.32221816
  0.1111111   1.176811952  0.4119484  0.4881507  0.32815474
  0.1111111   1.291549665  0.4186307  0.4710747  0.33383641
  0.1111111   1.417474163  0.4250836  0.4411928  0.33933744
  0.1111111   1.555676144  0.4305980  0.4235564  0.34401198
  0.1111111   1.707352647  0.4357652  0.3723274  0.34836787
  0.1111111   1.873817423  0.4392024  0.2635578  0.35113247
  0.1111111   2.056512308  0.4411105  0.2443734  0.35246376
  0.1111111   2.257019720  0.4411191        NaN  0.35246969
  0.1111111   2.477076356  0.4411191        NaN  0.35246969
  0.1111111   2.718588243  0.4411191        NaN  0.35246969
  0.1111111   2.983647240  0.4411191        NaN  0.35246969
  0.1111111   3.274549163  0.4411191        NaN  0.35246969
  0.1111111   3.593813664  0.4411191        NaN  0.35246969
  0.1111111   3.944206059  0.4411191        NaN  0.35246969
  0.1111111   4.328761281  0.4411191        NaN  0.35246969
  0.1111111   4.750810162  0.4411191        NaN  0.35246969
  0.1111111   5.214008288  0.4411191        NaN  0.35246969
  0.1111111   5.722367659  0.4411191        NaN  0.35246969
  0.1111111   6.280291442  0.4411191        NaN  0.35246969
  0.1111111   6.892612104  0.4411191        NaN  0.35246969
  0.1111111   7.564633276  0.4411191        NaN  0.35246969
  0.1111111   8.302175681  0.4411191        NaN  0.35246969
  0.1111111   9.111627561  0.4411191        NaN  0.35246969
  0.1111111  10.000000000  0.4411191        NaN  0.35246969
 [ reached getOption("max.print") -- omitted 800 rows ]

RMSE was used to select the optimal model using the smallest value.
The final values used for the model were alpha = 0.1111111 and lambda = 0.001321941.
enetModel$bestTune
enetLambda<-enetModel$bestTune$lambda
enetAlpha<-enetModel$bestTune$alpha

enetPredector<-setdiff(names(carbonTrainData),"CarbonEmission")

enetFinalModel<-glmnet(as.matrix(carbonTrainData[,enetPredector]),carbonTrainData[,"CarbonEmission"], alpha = enetAlpha,lambda = enetLambda, family = "gaussian")
Warning: NAs introduced by coercion
enetFinalModel

Call:  glmnet(x = as.matrix(carbonTrainData[, enetPredector]), y = carbonTrainData[,      "CarbonEmission"], family = "gaussian", alpha = enetAlpha,      lambda = enetLambda) 

  Df  %Dev   Lambda
1 17 38.08 0.001322
carbonTrainData$`No Recycling`
NULL

#Random Forest Model

library(randomForest)
set.seed(1)
randomForestModel<-randomForest(CarbonEmission~.,data = carbonTrainData)
randomForestModel

Call:
 randomForest(formula = CarbonEmission ~ ., data = carbonTrainData) 
               Type of random forest: regression
                     Number of trees: 500
No. of variables tried at each split: 9

          Mean of squared residuals: 0.01610385
                    % Var explained: 91.72
mRf<-train(CarbonEmission~.,
           data=carbonTrainData,
           method="rf",
           trControl=trainControl(method = "cv", number =5)
           )
mRf
Random Forest 

8001 samples
  28 predictor

No pre-processing
Resampling: Cross-Validated (5 fold) 
Summary of sample sizes: 6401, 6401, 6401, 6400, 6401 
Resampling results across tuning parameters:

  mtry  RMSE       Rsquared   MAE      
   2    0.2712051  0.8060857  0.2121779
  24    0.1382189  0.9061441  0.1065777
  47    0.1427589  0.8970490  0.1106657

RMSE was used to select the optimal model using the smallest value.
The final value used for the model was mtry = 24.
varImp(mRf)
rf variable importance

  only 20 most important variables shown (out of 47)
rfPred<-predict(mRf,newdata = carbonTestData)

MAE(carbonTestData$CarbonEmission,rfPred)
[1] 0.1042843
rmse(carbonTestData$CarbonEmission,rfPred)
[1] 0.005051824
cor(carbonTestData$CarbonEmission,rfPred)^2
[1] 0.907299
plot(carbonTestData$CarbonEmission,rfPred)

GBM

set.seed(1)

grBoostedTree<-train(
  CarbonEmission~.,
  data = carbonTrainData,
  method="gbm",
  trControl=trainControl(method = "cv",number = 5)
)
Iter   TrainDeviance   ValidDeviance   StepSize   Improve
     1        0.1863             nan     0.1000    0.0085
     2        0.1789             nan     0.1000    0.0070
     3        0.1715             nan     0.1000    0.0073
     4        0.1654             nan     0.1000    0.0060
     5        0.1595             nan     0.1000    0.0055
     6        0.1550             nan     0.1000    0.0042
     7        0.1503             nan     0.1000    0.0048
     8        0.1462             nan     0.1000    0.0039
     9        0.1424             nan     0.1000    0.0036
    10        0.1383             nan     0.1000    0.0041
    20        0.1140             nan     0.1000    0.0019
    40        0.0886             nan     0.1000    0.0009
    60        0.0720             nan     0.1000    0.0006
    80        0.0600             nan     0.1000    0.0005
   100        0.0513             nan     0.1000    0.0003
   120        0.0449             nan     0.1000    0.0002
   140        0.0399             nan     0.1000    0.0002
   150        0.0379             nan     0.1000    0.0001

Iter   TrainDeviance   ValidDeviance   StepSize   Improve
     1        0.1795             nan     0.1000    0.0151
     2        0.1671             nan     0.1000    0.0126
     3        0.1571             nan     0.1000    0.0100
     4        0.1486             nan     0.1000    0.0084
     5        0.1413             nan     0.1000    0.0076
     6        0.1348             nan     0.1000    0.0065
     7        0.1286             nan     0.1000    0.0061
     8        0.1230             nan     0.1000    0.0053
     9        0.1183             nan     0.1000    0.0046
    10        0.1136             nan     0.1000    0.0045
    20        0.0847             nan     0.1000    0.0020
    40        0.0562             nan     0.1000    0.0010
    60        0.0409             nan     0.1000    0.0005
    80        0.0320             nan     0.1000    0.0003
   100        0.0261             nan     0.1000    0.0002
   120        0.0222             nan     0.1000    0.0001
   140        0.0192             nan     0.1000    0.0001
   150        0.0180             nan     0.1000    0.0001

Iter   TrainDeviance   ValidDeviance   StepSize   Improve
     1        0.1759             nan     0.1000    0.0185
     2        0.1607             nan     0.1000    0.0156
     3        0.1479             nan     0.1000    0.0123
     4        0.1379             nan     0.1000    0.0099
     5        0.1281             nan     0.1000    0.0096
     6        0.1206             nan     0.1000    0.0076
     7        0.1141             nan     0.1000    0.0063
     8        0.1084             nan     0.1000    0.0055
     9        0.1028             nan     0.1000    0.0054
    10        0.0983             nan     0.1000    0.0045
    20        0.0700             nan     0.1000    0.0019
    40        0.0422             nan     0.1000    0.0010
    60        0.0296             nan     0.1000    0.0004
    80        0.0224             nan     0.1000    0.0003
   100        0.0177             nan     0.1000    0.0001
   120        0.0147             nan     0.1000    0.0001
   140        0.0123             nan     0.1000    0.0001
   150        0.0113             nan     0.1000    0.0001

Iter   TrainDeviance   ValidDeviance   StepSize   Improve
     1        0.1868             nan     0.1000    0.0088
     2        0.1792             nan     0.1000    0.0074
     3        0.1718             nan     0.1000    0.0074
     4        0.1659             nan     0.1000    0.0061
     5        0.1597             nan     0.1000    0.0057
     6        0.1548             nan     0.1000    0.0048
     7        0.1498             nan     0.1000    0.0047
     8        0.1457             nan     0.1000    0.0037
     9        0.1416             nan     0.1000    0.0040
    10        0.1381             nan     0.1000    0.0034
    20        0.1136             nan     0.1000    0.0017
    40        0.0888             nan     0.1000    0.0010
    60        0.0722             nan     0.1000    0.0008
    80        0.0605             nan     0.1000    0.0004
   100        0.0517             nan     0.1000    0.0003
   120        0.0453             nan     0.1000    0.0003
   140        0.0404             nan     0.1000    0.0002
   150        0.0384             nan     0.1000    0.0002

Iter   TrainDeviance   ValidDeviance   StepSize   Improve
     1        0.1801             nan     0.1000    0.0157
     2        0.1670             nan     0.1000    0.0129
     3        0.1565             nan     0.1000    0.0105
     4        0.1480             nan     0.1000    0.0087
     5        0.1408             nan     0.1000    0.0072
     6        0.1344             nan     0.1000    0.0063
     7        0.1280             nan     0.1000    0.0062
     8        0.1230             nan     0.1000    0.0052
     9        0.1184             nan     0.1000    0.0044
    10        0.1143             nan     0.1000    0.0043
    20        0.0856             nan     0.1000    0.0024
    40        0.0575             nan     0.1000    0.0009
    60        0.0413             nan     0.1000    0.0009
    80        0.0322             nan     0.1000    0.0003
   100        0.0262             nan     0.1000    0.0002
   120        0.0220             nan     0.1000    0.0002
   140        0.0190             nan     0.1000    0.0001
   150        0.0178             nan     0.1000    0.0001

Iter   TrainDeviance   ValidDeviance   StepSize   Improve
     1        0.1769             nan     0.1000    0.0188
     2        0.1610             nan     0.1000    0.0154
     3        0.1482             nan     0.1000    0.0127
     4        0.1381             nan     0.1000    0.0102
     5        0.1288             nan     0.1000    0.0094
     6        0.1210             nan     0.1000    0.0078
     7        0.1147             nan     0.1000    0.0060
     8        0.1092             nan     0.1000    0.0056
     9        0.1037             nan     0.1000    0.0056
    10        0.0988             nan     0.1000    0.0049
    20        0.0701             nan     0.1000    0.0022
    40        0.0429             nan     0.1000    0.0008
    60        0.0298             nan     0.1000    0.0005
    80        0.0227             nan     0.1000    0.0002
   100        0.0180             nan     0.1000    0.0001
   120        0.0147             nan     0.1000    0.0001
   140        0.0123             nan     0.1000    0.0001
   150        0.0114             nan     0.1000    0.0001

Iter   TrainDeviance   ValidDeviance   StepSize   Improve
     1        0.1853             nan     0.1000    0.0088
     2        0.1776             nan     0.1000    0.0076
     3        0.1703             nan     0.1000    0.0076
     4        0.1647             nan     0.1000    0.0058
     5        0.1587             nan     0.1000    0.0062
     6        0.1535             nan     0.1000    0.0049
     7        0.1487             nan     0.1000    0.0049
     8        0.1448             nan     0.1000    0.0040
     9        0.1406             nan     0.1000    0.0039
    10        0.1369             nan     0.1000    0.0035
    20        0.1125             nan     0.1000    0.0016
    40        0.0879             nan     0.1000    0.0009
    60        0.0712             nan     0.1000    0.0006
    80        0.0596             nan     0.1000    0.0005
   100        0.0509             nan     0.1000    0.0003
   120        0.0445             nan     0.1000    0.0002
   140        0.0398             nan     0.1000    0.0002
   150        0.0378             nan     0.1000    0.0001

Iter   TrainDeviance   ValidDeviance   StepSize   Improve
     1        0.1787             nan     0.1000    0.0155
     2        0.1655             nan     0.1000    0.0128
     3        0.1553             nan     0.1000    0.0102
     4        0.1472             nan     0.1000    0.0081
     5        0.1393             nan     0.1000    0.0078
     6        0.1329             nan     0.1000    0.0065
     7        0.1268             nan     0.1000    0.0060
     8        0.1213             nan     0.1000    0.0052
     9        0.1162             nan     0.1000    0.0051
    10        0.1117             nan     0.1000    0.0044
    20        0.0828             nan     0.1000    0.0016
    40        0.0554             nan     0.1000    0.0009
    60        0.0401             nan     0.1000    0.0004
    80        0.0314             nan     0.1000    0.0002
   100        0.0257             nan     0.1000    0.0002
   120        0.0217             nan     0.1000    0.0001
   140        0.0189             nan     0.1000    0.0001
   150        0.0178             nan     0.1000    0.0001

Iter   TrainDeviance   ValidDeviance   StepSize   Improve
     1        0.1755             nan     0.1000    0.0190
     2        0.1596             nan     0.1000    0.0156
     3        0.1468             nan     0.1000    0.0129
     4        0.1366             nan     0.1000    0.0104
     5        0.1273             nan     0.1000    0.0091
     6        0.1201             nan     0.1000    0.0073
     7        0.1126             nan     0.1000    0.0074
     8        0.1066             nan     0.1000    0.0058
     9        0.1019             nan     0.1000    0.0045
    10        0.0979             nan     0.1000    0.0040
    20        0.0685             nan     0.1000    0.0020
    40        0.0416             nan     0.1000    0.0010
    60        0.0289             nan     0.1000    0.0004
    80        0.0221             nan     0.1000    0.0002
   100        0.0173             nan     0.1000    0.0001
   120        0.0141             nan     0.1000    0.0001
   140        0.0119             nan     0.1000    0.0001
   150        0.0110             nan     0.1000    0.0000

Iter   TrainDeviance   ValidDeviance   StepSize   Improve
     1        0.1855             nan     0.1000    0.0088
     2        0.1782             nan     0.1000    0.0075
     3        0.1710             nan     0.1000    0.0069
     4        0.1648             nan     0.1000    0.0060
     5        0.1589             nan     0.1000    0.0056
     6        0.1542             nan     0.1000    0.0046
     7        0.1493             nan     0.1000    0.0049
     8        0.1452             nan     0.1000    0.0040
     9        0.1414             nan     0.1000    0.0035
    10        0.1375             nan     0.1000    0.0037
    20        0.1132             nan     0.1000    0.0020
    40        0.0882             nan     0.1000    0.0009
    60        0.0717             nan     0.1000    0.0007
    80        0.0599             nan     0.1000    0.0005
   100        0.0512             nan     0.1000    0.0003
   120        0.0448             nan     0.1000    0.0003
   140        0.0399             nan     0.1000    0.0002
   150        0.0380             nan     0.1000    0.0001

Iter   TrainDeviance   ValidDeviance   StepSize   Improve
     1        0.1787             nan     0.1000    0.0152
     2        0.1663             nan     0.1000    0.0125
     3        0.1559             nan     0.1000    0.0102
     4        0.1475             nan     0.1000    0.0081
     5        0.1400             nan     0.1000    0.0073
     6        0.1339             nan     0.1000    0.0061
     7        0.1277             nan     0.1000    0.0063
     8        0.1219             nan     0.1000    0.0056
     9        0.1171             nan     0.1000    0.0049
    10        0.1125             nan     0.1000    0.0045
    20        0.0836             nan     0.1000    0.0017
    40        0.0552             nan     0.1000    0.0010
    60        0.0402             nan     0.1000    0.0005
    80        0.0312             nan     0.1000    0.0003
   100        0.0253             nan     0.1000    0.0002
   120        0.0214             nan     0.1000    0.0001
   140        0.0187             nan     0.1000    0.0001
   150        0.0176             nan     0.1000    0.0001

Iter   TrainDeviance   ValidDeviance   StepSize   Improve
     1        0.1756             nan     0.1000    0.0190
     2        0.1605             nan     0.1000    0.0152
     3        0.1477             nan     0.1000    0.0125
     4        0.1376             nan     0.1000    0.0102
     5        0.1289             nan     0.1000    0.0088
     6        0.1207             nan     0.1000    0.0082
     7        0.1138             nan     0.1000    0.0068
     8        0.1072             nan     0.1000    0.0066
     9        0.1016             nan     0.1000    0.0053
    10        0.0970             nan     0.1000    0.0045
    20        0.0687             nan     0.1000    0.0019
    40        0.0416             nan     0.1000    0.0010
    60        0.0291             nan     0.1000    0.0005
    80        0.0221             nan     0.1000    0.0002
   100        0.0174             nan     0.1000    0.0002
   120        0.0141             nan     0.1000    0.0001
   140        0.0120             nan     0.1000    0.0001
   150        0.0112             nan     0.1000    0.0000

Iter   TrainDeviance   ValidDeviance   StepSize   Improve
     1        0.1844             nan     0.1000    0.0087
     2        0.1767             nan     0.1000    0.0075
     3        0.1697             nan     0.1000    0.0071
     4        0.1636             nan     0.1000    0.0060
     5        0.1576             nan     0.1000    0.0059
     6        0.1528             nan     0.1000    0.0046
     7        0.1479             nan     0.1000    0.0048
     8        0.1439             nan     0.1000    0.0039
     9        0.1400             nan     0.1000    0.0038
    10        0.1367             nan     0.1000    0.0031
    20        0.1128             nan     0.1000    0.0018
    40        0.0879             nan     0.1000    0.0009
    60        0.0716             nan     0.1000    0.0005
    80        0.0597             nan     0.1000    0.0004
   100        0.0512             nan     0.1000    0.0004
   120        0.0449             nan     0.1000    0.0002
   140        0.0399             nan     0.1000    0.0002
   150        0.0380             nan     0.1000    0.0002

Iter   TrainDeviance   ValidDeviance   StepSize   Improve
     1        0.1783             nan     0.1000    0.0152
     2        0.1660             nan     0.1000    0.0123
     3        0.1558             nan     0.1000    0.0101
     4        0.1472             nan     0.1000    0.0084
     5        0.1396             nan     0.1000    0.0074
     6        0.1334             nan     0.1000    0.0063
     7        0.1275             nan     0.1000    0.0059
     8        0.1228             nan     0.1000    0.0048
     9        0.1170             nan     0.1000    0.0055
    10        0.1124             nan     0.1000    0.0046
    20        0.0830             nan     0.1000    0.0015
    40        0.0543             nan     0.1000    0.0013
    60        0.0399             nan     0.1000    0.0005
    80        0.0315             nan     0.1000    0.0002
   100        0.0258             nan     0.1000    0.0003
   120        0.0218             nan     0.1000    0.0001
   140        0.0188             nan     0.1000    0.0001
   150        0.0176             nan     0.1000    0.0001

Iter   TrainDeviance   ValidDeviance   StepSize   Improve
     1        0.1748             nan     0.1000    0.0189
     2        0.1592             nan     0.1000    0.0155
     3        0.1466             nan     0.1000    0.0128
     4        0.1365             nan     0.1000    0.0101
     5        0.1276             nan     0.1000    0.0088
     6        0.1204             nan     0.1000    0.0071
     7        0.1137             nan     0.1000    0.0065
     8        0.1080             nan     0.1000    0.0056
     9        0.1028             nan     0.1000    0.0051
    10        0.0975             nan     0.1000    0.0051
    20        0.0701             nan     0.1000    0.0019
    40        0.0433             nan     0.1000    0.0008
    60        0.0304             nan     0.1000    0.0006
    80        0.0232             nan     0.1000    0.0003
   100        0.0183             nan     0.1000    0.0001
   120        0.0151             nan     0.1000    0.0001
   140        0.0127             nan     0.1000    0.0001
   150        0.0118             nan     0.1000    0.0001

Iter   TrainDeviance   ValidDeviance   StepSize   Improve
     1        0.1753             nan     0.1000    0.0188
     2        0.1599             nan     0.1000    0.0153
     3        0.1472             nan     0.1000    0.0125
     4        0.1372             nan     0.1000    0.0100
     5        0.1289             nan     0.1000    0.0084
     6        0.1199             nan     0.1000    0.0087
     7        0.1130             nan     0.1000    0.0069
     8        0.1068             nan     0.1000    0.0060
     9        0.1018             nan     0.1000    0.0051
    10        0.0977             nan     0.1000    0.0040
    20        0.0699             nan     0.1000    0.0018
    40        0.0427             nan     0.1000    0.0007
    60        0.0297             nan     0.1000    0.0005
    80        0.0229             nan     0.1000    0.0003
   100        0.0185             nan     0.1000    0.0002
   120        0.0150             nan     0.1000    0.0002
   140        0.0125             nan     0.1000    0.0000
   150        0.0116             nan     0.1000    0.0001
grBoostedTree
Stochastic Gradient Boosting 

8001 samples
  28 predictor

No pre-processing
Resampling: Cross-Validated (5 fold) 
Summary of sample sizes: 6400, 6402, 6400, 6401, 6401 
Resampling results across tuning parameters:

  interaction.depth  n.trees  RMSE       Rsquared   MAE       
  1                   50      0.2825681  0.6848913  0.21910129
  1                  100      0.2276478  0.7963534  0.17429897
  1                  150      0.1965925  0.8385292  0.14854439
  2                   50      0.2193290  0.8179728  0.16722911
  2                  100      0.1644147  0.8876415  0.12355000
  2                  150      0.1375675  0.9160870  0.10256446
  3                   50      0.1897724  0.8600842  0.14451760
  3                  100      0.1377020  0.9190183  0.10349530
  3                  150      0.1115734  0.9430885  0.08264941

Tuning parameter 'shrinkage' was held constant at a value of 0.1
Tuning parameter 'n.minobsinnode' was held constant at a value of 10
RMSE was used to select the optimal model using the smallest value.
The final values used for the model were n.trees = 150, interaction.depth = 3, shrinkage = 0.1 and n.minobsinnode = 10.
gbmPred<-predict(grBoostedTree, carbonTestData)
MAE(carbonTestData$CarbonEmission,gbmPred)
[1] 0.08071192
rmse(carbonTestData$CarbonEmission,gbmPred)
[1] 0.0006674298
cor(carbonTestData$CarbonEmission,gbmPred)^2
[1] 0.9428016
plot(carbonTestData$CarbonEmission,gbmPred)

#SV Linear Model

set.seed(1)

svmLinear<-train(
  CarbonEmission~.,
  data = carbonTrainData,
  method="svmLinear",
  trControl=trainControl(method = "cv",number = 5, preProc=c("center","scale"))
)
svmLinear
Support Vector Machines with Linear Kernel 

8001 samples
  28 predictor

No pre-processing
Resampling: Cross-Validated (5 fold) 
Summary of sample sizes: 6400, 6402, 6400, 6401, 6401 
Resampling results:

  RMSE       Rsquared   MAE       
  0.1216608  0.9244839  0.08585286

Tuning parameter 'C' was held constant at a value of 1
svmPred<-predict(svmLinear,carbonTestData)

plot(svmPred,carbonTestData$CarbonEmission)

#SVM Radial Model

set.seed(1)

svmRadial<-train(
  CarbonEmission~.,
  data = carbonTrainData,
  method="svmRadial",
  trControl=trainControl(method = "cv",number = 5, preProc=c("center","scale"))
)
svmRadial
Support Vector Machines with Radial Basis Function Kernel 

8001 samples
  28 predictor

No pre-processing
Resampling: Cross-Validated (5 fold) 
Summary of sample sizes: 6400, 6402, 6400, 6401, 6401 
Resampling results across tuning parameters:

  C     RMSE        Rsquared   MAE       
  0.25  0.08190788  0.9678058  0.05674104
  0.50  0.06914040  0.9764828  0.04865093
  1.00  0.06154311  0.9809844  0.04422410

Tuning parameter 'sigma' was held constant at a value of 0.01148182
RMSE was used to select the optimal model using the smallest value.
The final values used for the model were sigma = 0.01148182 and C = 1.
svmRadialPred<-predict(svmRadial,carbonTestData)

plot(svmRadialPred,carbonTestData$CarbonEmission)

#Comparing models

compare=resamples(list(KNN=knnModel,LIN=lmModel,stepWise=stepwiseModel,Lasso=lassoModel,Ridge=ridgeModel,Enet=enetModel,RF=mRf,GBM=grBoostedTree,SVML=svmLinear,SVMR=svmRadial))
summary(compare) # Out of all the models SVM Radial stands out the most

Call:
summary.resamples(object = compare)

Models: KNN, LIN, stepWise, Lasso, Ridge, Enet, RF, GBM, SVML, SVMR 
Number of resamples: 5 

MAE 
               Min.    1st Qu.     Median       Mean    3rd Qu.       Max. NA's
KNN      0.29623471 0.29740921 0.30499011 0.30249251 0.30575296 0.30807555    0
LIN      0.08480145 0.08612879 0.08704453 0.08667648 0.08732807 0.08807956    0
stepWise 0.27060240 0.27186835 0.27300079 0.27312929 0.27490680 0.27526812    0
Lasso    0.08496466 0.08611094 0.08720014 0.08687629 0.08723989 0.08886584    0
Ridge    0.08691866 0.08723220 0.08858797 0.08856450 0.08950638 0.09057731    0
Enet     0.08445732 0.08608927 0.08684209 0.08667484 0.08711682 0.08886872    0
RF       0.10394857 0.10611419 0.10628345 0.10657774 0.10707089 0.10947160    0
GBM      0.08008938 0.08165398 0.08346939 0.08264941 0.08390377 0.08413051    0
SVML     0.08366201 0.08516816 0.08566449 0.08585286 0.08648885 0.08828078    0
SVMR     0.04362895 0.04386712 0.04451333 0.04422410 0.04452306 0.04458802    0

RMSE 
               Min.    1st Qu.     Median       Mean    3rd Qu.       Max. NA's
KNN      0.37137353 0.38041941 0.38481702 0.38142506 0.38485362 0.38566170    0
LIN      0.11859265 0.12133365 0.12142487 0.12108930 0.12164347 0.12245187    0
stepWise 0.33950584 0.34519992 0.34662453 0.34713775 0.35174536 0.35261310    0
Lasso    0.11828998 0.12011679 0.12300251 0.12149735 0.12301315 0.12306432    0
Ridge    0.11999485 0.12265571 0.12476610 0.12362958 0.12481897 0.12591227    0
Enet     0.11826611 0.11953850 0.12259232 0.12122440 0.12285103 0.12287402    0
RF       0.13240909 0.13682774 0.13761090 0.13821888 0.14192035 0.14232634    0
GBM      0.10729648 0.11057936 0.11283863 0.11157342 0.11325280 0.11389984    0
SVML     0.11867684 0.11977993 0.12295302 0.12166077 0.12340309 0.12349097    0
SVMR     0.06067301 0.06135216 0.06154531 0.06154311 0.06180787 0.06233721    0

Rsquared 
              Min.   1st Qu.    Median      Mean   3rd Qu.      Max. NA's
KNN      0.2445364 0.2467487 0.2682413 0.2635529 0.2744761 0.2837618    0
LIN      0.9185288 0.9241334 0.9249591 0.9245749 0.9264152 0.9288379    0
stepWise 0.3578135 0.3697142 0.3798824 0.3809492 0.3880434 0.4092925    0
Lasso    0.9224198 0.9225172 0.9253228 0.9245000 0.9257084 0.9265317    0
Ridge    0.9216849 0.9217637 0.9243347 0.9235380 0.9248964 0.9250104    0
Enet     0.9225978 0.9226924 0.9254929 0.9247599 0.9258395 0.9271770    0
RF       0.9001725 0.9046329 0.9068228 0.9061441 0.9088380 0.9102542    0
GBM      0.9407131 0.9426509 0.9428430 0.9430885 0.9438202 0.9454152    0
SVML     0.9220482 0.9224530 0.9249043 0.9244839 0.9254125 0.9276015    0
SVMR     0.9805677 0.9807548 0.9809038 0.9809844 0.9810939 0.9816018    0

Neural Network Preprocessing

library(caret)
carbonInd<-createDataPartition(carbonTrainData$CarbonEmission,p=0.9,list = FALSE)
carbonIndex<-which(names(carbonTrainData)=='CarbonEmission')

carbonTrainingData<-carbonTrainData[carbonInd,-carbonIndex]
str(carbonTrainingData)
'data.frame':   7202 obs. of  28 variables:
 $ Body.Type                    : Factor w/ 4 levels "normal","obese",..: 3 2 3 2 3 4 4 3 4 1 ...
 $ Sex                          : Factor w/ 2 levels "female","male": 1 1 2 1 2 1 1 2 1 1 ...
 $ Diet                         : Factor w/ 4 levels "omnivore","pescatarian",..: 2 4 1 4 4 3 3 1 2 4 ...
 $ How.Often.Shower             : Factor w/ 4 levels "daily","less frequently",..: 1 2 4 1 2 2 3 1 1 3 ...
 $ Heating.Energy.Source        : Factor w/ 4 levels "coal","electricity",..: 1 3 4 1 4 4 1 4 4 4 ...
 $ Transport                    : Factor w/ 3 levels "private","public",..: 2 3 3 1 2 1 3 2 2 2 ...
 $ Vehicle.Type                 : Factor w/ 6 levels "diesel","electric",..: 3 3 3 1 3 4 3 3 3 3 ...
 $ Social.Activity              : Factor w/ 3 levels "never","often",..: 2 2 3 2 3 1 3 1 2 1 ...
 $ Monthly.Grocery.Bill         : num  230 114 157 266 144 56 59 200 135 146 ...
 $ Frequency.of.Traveling.by.Air: Factor w/ 4 levels "frequently","never",..: 1 3 3 4 1 3 4 1 3 2 ...
 $ Vehicle.Monthly.Distance.Km  : num  210 9 74 8457 658 ...
 $ Waste.Bag.Size               : Factor w/ 4 levels "extra large",..: 2 1 3 2 2 3 1 3 1 1 ...
 $ Waste.Bag.Weekly.Count       : num  4 3 3 1 1 4 3 3 1 4 ...
 $ How.Long.TV.PC.Daily.Hour    : num  7 9 20 3 22 9 5 3 8 12 ...
 $ How.Many.New.Clothes.Monthly : num  26 38 5 5 18 11 39 31 23 27 ...
 $ How.Long.Internet.Daily.Hour : num  1 5 7 6 9 19 15 15 18 21 ...
 $ Energy.efficiency            : Factor w/ 3 levels "No","Sometimes",..: 1 1 2 3 2 2 1 3 2 1 ...
 $ Metal                        : num  1 1 1 0 1 0 0 0 0 0 ...
 $ Paper                        : num  0 0 1 1 1 0 1 0 0 1 ...
 $ Plastic                      : num  0 0 1 0 0 0 1 0 0 1 ...
 $ Glass                        : num  0 0 1 0 1 0 1 1 1 0 ...
 $ No_Recycling                 : num  0 0 0 0 0 1 0 0 0 0 ...
 $ Stove                        : num  1 1 0 0 1 0 1 0 0 1 ...
 $ Oven                         : num  1 0 0 1 1 0 0 0 0 0 ...
 $ Microwave                    : num  0 1 1 0 1 0 1 1 1 1 ...
 $ Grill                        : num  0 0 1 0 0 1 0 1 1 0 ...
 $ Airfryer                     : num  0 0 1 0 0 1 0 1 1 0 ...
 $ No_Cooking                   : num  0 0 0 0 0 0 0 0 0 0 ...
carbonTrainingLabels<-carbonTrainData[carbonInd,carbonIndex]
str(carbonTrainingLabels)
 num [1:7202] 7.71 7.55 6.98 8.46 7.41 ...
carbonValidationData<-carbonTrainData[-carbonInd,-carbonIndex]
carbonValidationData

carbonValidationLabels<-carbonTrainData[-carbonInd,carbonIndex]
str(carbonValidationLabels)
 num [1:799] 6.93 7.5 7.41 7.51 7.13 ...
carbonTestingData<-carbonTestData[,-carbonIndex]
carbonTestingData

carbonTestingLabels<-carbonTestData[,carbonIndex]
str(carbonTestingLabels)
 num [1:1999] 7.86 6.56 7.6 7.35 8.27 ...
dim(carbonTrainingData)
[1] 7202   28
dim(carbonTestingData)
[1] 1999   28

#Scaling numeric Variables and one hot encoding categorical variables

library(mltools)
library(data.table)
numericCols<-c("Monthly.Grocery.Bill","Vehicle.Monthly.Distance.Km","Waste.Bag.Weekly.Count",
               "How.Long.TV.PC.Daily.Hour","How.Many.New.Clothes.Monthly","How.Long.Internet.Daily.Hour","Metal","Paper","Plastic","Glass","Stove","Oven"
               ,"Microwave","Grill","Airfryer","No_Cooking","No_Recycling")

categoricalCols<-c("Body.Type","Sex","Diet","How.Often.Shower","Heating.Energy.Source","Transport","Vehicle.Type","Social.Activity",
                   "Frequency.of.Traveling.by.Air","Waste.Bag.Size","Energy.efficiency")

carbonTrainingDataNew<-scale(carbonTrainingData[,numericCols])
colMeanTrain<-attr(carbonTrainingDataNew,"scaled:center")
colStddevsTrain<-attr(carbonTrainingDataNew,"scaled:scale")


carbonTrainingData[,numericCols]<-carbonTrainingDataNew
carbonValidationData[,numericCols]<-scale(carbonValidationData[,numericCols],center = colMeanTrain,scale = colStddevsTrain)
carbonTestingData[,numericCols]<-scale(carbonTestingData[,numericCols],center = colMeanTrain,scale = colStddevsTrain)

carbonTrainingTable<-as.data.table(carbonTrainingData)
carbonValidationTable<-as.data.table(carbonValidationData)
carbonTestingTable<-as.data.table(carbonTestingData)

carbonTrainingOneHot<-one_hot(carbonTrainingTable,naCols=FALSE,dropCols=TRUE,dropUnusedLevels=TRUE)
carbonTrainingOneHot

carbonValidationOneHot<-one_hot(carbonValidationTable,naCols=FALSE,dropCols=TRUE,dropUnusedLevels=TRUE)
carbonValidationOneHot

carbonTestingOneHot<-one_hot(carbonTestingTable,naCols=FALSE,dropCols=TRUE,dropUnusedLevels=TRUE)
carbonTestingOneHot

carbonTrainingFinal<-as.data.frame(cbind(carbonTrainingTable[, ..numericCols], carbonTrainingOneHot))
carbonTrainingFinal

carbonValidationFinal<-as.data.frame(cbind(carbonValidationTable[, ..numericCols], carbonValidationOneHot))
carbonValidationFinal

carbonTestingFinal<-as.data.frame(cbind(carbonTestingTable[, ..numericCols], carbonTestingOneHot))
carbonTestingFinal
library(keras)

model<-keras_model_sequential()%>%
  layer_dense(units = 32,activation = "relu",input_shape = dim(carbonTrainingFinal)[2])%>%
  layer_dropout(rate=0.3)%>%
  layer_dense(units = 32,activation = "relu")%>%
  layer_dropout(rate=0.3)%>%
  layer_dense(units = 16,activation = "relu")%>%
  layer_dropout(rate=0.3)%>%
  layer_dense(units = 1)
/Users/angadsingh/.virtualenvs/r-tensorflow/lib/python3.9/site-packages/urllib3/__init__.py:35: NotOpenSSLWarning: urllib3 v2 only supports OpenSSL 1.1.1+, currently the 'ssl' module is compiled with 'LibreSSL 2.8.3'. See: https://github.com/urllib3/urllib3/issues/3020
  warnings.warn(
2024-05-06 19:15:32.845040: I metal_plugin/src/device/metal_device.cc:1154] Metal device set to: Apple M2 Pro
2024-05-06 19:15:32.845063: I metal_plugin/src/device/metal_device.cc:296] systemMemory: 16.00 GB
2024-05-06 19:15:32.845070: I metal_plugin/src/device/metal_device.cc:313] maxCacheSize: 5.33 GB
2024-05-06 19:15:32.845150: I tensorflow/core/common_runtime/pluggable_device/pluggable_device_factory.cc:303] Could not identify NUMA node of platform GPU ID 0, defaulting to 0. Your kernel may not have been built with NUMA support.
2024-05-06 19:15:32.845446: I tensorflow/core/common_runtime/pluggable_device/pluggable_device_factory.cc:269] Created TensorFlow device (/job:localhost/replica:0/task:0/device:GPU:0 with 0 MB memory) -> physical PluggableDevice (device: 0, name: METAL, pci bus id: <undefined>)
model %>% compile(
  loss="mse",
  optimizer=optimizer_adam(lr=0.001)
)
WARNING:absl:At this time, the v2.11+ optimizer `tf.keras.optimizers.Adam` runs slowly on M1/M2 Macs, please use the legacy Keras optimizer instead, located at `tf.keras.optimizers.legacy.Adam`.
WARNING:absl:`lr` is deprecated in Keras optimizer, please use `learning_rate` or use the legacy optimizer, e.g.,tf.keras.optimizers.legacy.Adam.
WARNING:absl:There is a known slowdown when using v2.11+ Keras optimizers on M1/M2 Macs. Falling back to the legacy Keras optimizer, i.e., `tf.keras.optimizers.legacy.Adam`.
history<-model %>% fit(as.matrix(carbonTrainingFinal),
                       carbonTrainingLabels,
                       batch_size=50,
                       epochs=20,
                       validation_data=list(as.matrix(carbonValidationFinal),carbonValidationLabels)
                         )
Epoch 1/20
2024-05-06 19:15:34.205221: I tensorflow/core/grappler/optimizers/custom_graph_optimizer_registry.cc:114] Plugin optimizer for device_type GPU is enabled.

  1/145 [..............................] - ETA: 6:28 - loss: 46.4678
 10/145 [=>............................] - ETA: 0s - loss: 49.9576  
 21/145 [===>..........................] - ETA: 0s - loss: 44.4478
 30/145 [=====>........................] - ETA: 0s - loss: 38.7821
 41/145 [=======>......................] - ETA: 0s - loss: 34.2338
 52/145 [=========>....................] - ETA: 0s - loss: 30.6068
 63/145 [============>.................] - ETA: 0s - loss: 27.5559
 74/145 [==============>...............] - ETA: 0s - loss: 25.2218
 84/145 [================>.............] - ETA: 0s - loss: 23.4696
 95/145 [==================>...........] - ETA: 0s - loss: 21.7564
106/145 [====================>.........] - ETA: 0s - loss: 20.4552
116/145 [=======================>......] - ETA: 0s - loss: 19.3248
126/145 [=========================>....] - ETA: 0s - loss: 18.3500
137/145 [===========================>..] - ETA: 0s - loss: 17.4256
145/145 [==============================] - 5s 17ms/step - loss: 16.9642
2024-05-06 19:15:39.198894: I tensorflow/core/grappler/optimizers/custom_graph_optimizer_registry.cc:114] Plugin optimizer for device_type GPU is enabled.

145/145 [==============================] - 6s 22ms/step - loss: 16.9642 - val_loss: 1.2081
Epoch 2/20

  1/145 [..............................] - ETA: 1s - loss: 5.5795
  9/145 [>.............................] - ETA: 0s - loss: 6.7267
 19/145 [==>...........................] - ETA: 0s - loss: 6.5399
 29/145 [=====>........................] - ETA: 0s - loss: 6.4078
 40/145 [=======>......................] - ETA: 0s - loss: 6.3344
 51/145 [=========>....................] - ETA: 0s - loss: 6.2726
 61/145 [===========>..................] - ETA: 0s - loss: 6.1715
 72/145 [=============>................] - ETA: 0s - loss: 6.0768
 82/145 [===============>..............] - ETA: 0s - loss: 5.9246
 93/145 [==================>...........] - ETA: 0s - loss: 5.8156
104/145 [====================>.........] - ETA: 0s - loss: 5.7398
115/145 [======================>.......] - ETA: 0s - loss: 5.6460
126/145 [=========================>....] - ETA: 0s - loss: 5.5552
135/145 [==========================>...] - ETA: 0s - loss: 5.4703
144/145 [============================>.] - ETA: 0s - loss: 5.4012
145/145 [==============================] - 1s 5ms/step - loss: 5.4004

145/145 [==============================] - 1s 6ms/step - loss: 5.4004 - val_loss: 0.8976
Epoch 3/20

  1/145 [..............................] - ETA: 1s - loss: 3.9111
  9/145 [>.............................] - ETA: 0s - loss: 4.4293
 19/145 [==>...........................] - ETA: 0s - loss: 4.1524
 30/145 [=====>........................] - ETA: 0s - loss: 4.0496
 41/145 [=======>......................] - ETA: 0s - loss: 3.9257
 47/145 [========>.....................] - ETA: 0s - loss: 3.9645
 56/145 [==========>...................] - ETA: 0s - loss: 3.8986
 67/145 [============>.................] - ETA: 0s - loss: 3.8377
 77/145 [==============>...............] - ETA: 0s - loss: 3.8053
 87/145 [=================>............] - ETA: 0s - loss: 3.7650
 97/145 [===================>..........] - ETA: 0s - loss: 3.7438
108/145 [=====================>........] - ETA: 0s - loss: 3.7306
119/145 [=======================>......] - ETA: 0s - loss: 3.6962
130/145 [=========================>....] - ETA: 0s - loss: 3.6527
141/145 [============================>.] - ETA: 0s - loss: 3.6156
145/145 [==============================] - 1s 5ms/step - loss: 3.6040

145/145 [==============================] - 1s 6ms/step - loss: 3.6040 - val_loss: 0.6919
Epoch 4/20

  1/145 [..............................] - ETA: 1s - loss: 2.6878
  9/145 [>.............................] - ETA: 0s - loss: 2.9561
 19/145 [==>...........................] - ETA: 0s - loss: 2.8511
 29/145 [=====>........................] - ETA: 0s - loss: 2.7951
 40/145 [=======>......................] - ETA: 0s - loss: 2.7374
 51/145 [=========>....................] - ETA: 0s - loss: 2.7252
 61/145 [===========>..................] - ETA: 0s - loss: 2.6868
 69/145 [=============>................] - ETA: 0s - loss: 2.7536
 79/145 [===============>..............] - ETA: 0s - loss: 2.6883
 90/145 [=================>............] - ETA: 0s - loss: 2.6733
101/145 [===================>..........] - ETA: 0s - loss: 2.6547
112/145 [======================>.......] - ETA: 0s - loss: 2.6215
123/145 [========================>.....] - ETA: 0s - loss: 2.5796
133/145 [==========================>...] - ETA: 0s - loss: 2.5634
144/145 [============================>.] - ETA: 0s - loss: 2.5373
145/145 [==============================] - 1s 5ms/step - loss: 2.5369

145/145 [==============================] - 1s 6ms/step - loss: 2.5369 - val_loss: 0.5816
Epoch 5/20

  1/145 [..............................] - ETA: 1s - loss: 2.6723
  9/145 [>.............................] - ETA: 0s - loss: 2.4941
 20/145 [===>..........................] - ETA: 0s - loss: 2.2869
 30/145 [=====>........................] - ETA: 0s - loss: 2.2124
 41/145 [=======>......................] - ETA: 0s - loss: 2.1710
 51/145 [=========>....................] - ETA: 0s - loss: 2.1798
 61/145 [===========>..................] - ETA: 0s - loss: 2.1802
 72/145 [=============>................] - ETA: 0s - loss: 2.1622
 83/145 [================>.............] - ETA: 0s - loss: 2.1755
 94/145 [==================>...........] - ETA: 0s - loss: 2.1407
105/145 [====================>.........] - ETA: 0s - loss: 2.1034
115/145 [======================>.......] - ETA: 0s - loss: 2.0663
126/145 [=========================>....] - ETA: 0s - loss: 2.0341
137/145 [===========================>..] - ETA: 0s - loss: 2.0106
145/145 [==============================] - 1s 5ms/step - loss: 1.9963

145/145 [==============================] - 1s 6ms/step - loss: 1.9963 - val_loss: 0.6800
Epoch 6/20

  1/145 [..............................] - ETA: 0s - loss: 1.6451
 10/145 [=>............................] - ETA: 0s - loss: 1.8759
 20/145 [===>..........................] - ETA: 0s - loss: 1.8837
 30/145 [=====>........................] - ETA: 0s - loss: 1.8410
 39/145 [=======>......................] - ETA: 0s - loss: 1.7681
 49/145 [=========>....................] - ETA: 0s - loss: 1.7354
 60/145 [===========>..................] - ETA: 0s - loss: 1.7178
 70/145 [=============>................] - ETA: 0s - loss: 1.7201
 81/145 [===============>..............] - ETA: 0s - loss: 1.6933
 92/145 [==================>...........] - ETA: 0s - loss: 1.6739
103/145 [====================>.........] - ETA: 0s - loss: 1.6700
114/145 [======================>.......] - ETA: 0s - loss: 1.6623
125/145 [========================>.....] - ETA: 0s - loss: 1.6400
136/145 [===========================>..] - ETA: 0s - loss: 1.6359
145/145 [==============================] - 1s 5ms/step - loss: 1.6125

145/145 [==============================] - 1s 6ms/step - loss: 1.6125 - val_loss: 0.4125
Epoch 7/20

  1/145 [..............................] - ETA: 1s - loss: 1.3797
  8/145 [>.............................] - ETA: 1s - loss: 1.4366
 18/145 [==>...........................] - ETA: 0s - loss: 1.4155
 28/145 [====>.........................] - ETA: 0s - loss: 1.4122
 35/145 [======>.......................] - ETA: 0s - loss: 1.4595
 44/145 [========>.....................] - ETA: 0s - loss: 1.4244
 55/145 [==========>...................] - ETA: 0s - loss: 1.4464
 66/145 [============>.................] - ETA: 0s - loss: 1.4534
 77/145 [==============>...............] - ETA: 0s - loss: 1.4565
 88/145 [=================>............] - ETA: 0s - loss: 1.4558
 99/145 [===================>..........] - ETA: 0s - loss: 1.4396
110/145 [=====================>........] - ETA: 0s - loss: 1.4300
121/145 [========================>.....] - ETA: 0s - loss: 1.4109
132/145 [==========================>...] - ETA: 0s - loss: 1.4028
142/145 [============================>.] - ETA: 0s - loss: 1.4008
145/145 [==============================] - 1s 5ms/step - loss: 1.3981

145/145 [==============================] - 1s 6ms/step - loss: 1.3981 - val_loss: 0.2959
Epoch 8/20

  1/145 [..............................] - ETA: 1s - loss: 0.9104
 10/145 [=>............................] - ETA: 0s - loss: 1.2762
 21/145 [===>..........................] - ETA: 0s - loss: 1.3011
 31/145 [=====>........................] - ETA: 0s - loss: 1.2499
 42/145 [=======>......................] - ETA: 0s - loss: 1.2723
 53/145 [=========>....................] - ETA: 0s - loss: 1.2516
 64/145 [============>.................] - ETA: 0s - loss: 1.2513
 75/145 [==============>...............] - ETA: 0s - loss: 1.2469
 85/145 [================>.............] - ETA: 0s - loss: 1.2243
 96/145 [==================>...........] - ETA: 0s - loss: 1.2238
107/145 [=====================>........] - ETA: 0s - loss: 1.2321
117/145 [=======================>......] - ETA: 0s - loss: 1.2317
128/145 [=========================>....] - ETA: 0s - loss: 1.2260
139/145 [===========================>..] - ETA: 0s - loss: 1.2180
145/145 [==============================] - 1s 5ms/step - loss: 1.2225

145/145 [==============================] - 1s 6ms/step - loss: 1.2225 - val_loss: 0.3566
Epoch 9/20

  1/145 [..............................] - ETA: 1s - loss: 0.9176
 10/145 [=>............................] - ETA: 0s - loss: 1.1608
 20/145 [===>..........................] - ETA: 0s - loss: 1.1806
 31/145 [=====>........................] - ETA: 0s - loss: 1.1855
 42/145 [=======>......................] - ETA: 0s - loss: 1.1916
 53/145 [=========>....................] - ETA: 0s - loss: 1.1903
 64/145 [============>.................] - ETA: 0s - loss: 1.1788
 75/145 [==============>...............] - ETA: 0s - loss: 1.1633
 86/145 [================>.............] - ETA: 0s - loss: 1.1535
 97/145 [===================>..........] - ETA: 0s - loss: 1.1419
108/145 [=====================>........] - ETA: 0s - loss: 1.1369
119/145 [=======================>......] - ETA: 0s - loss: 1.1377
130/145 [=========================>....] - ETA: 0s - loss: 1.1367
140/145 [===========================>..] - ETA: 0s - loss: 1.1345
145/145 [==============================] - 1s 5ms/step - loss: 1.1392

145/145 [==============================] - 1s 6ms/step - loss: 1.1392 - val_loss: 0.2703
Epoch 10/20

  1/145 [..............................] - ETA: 0s - loss: 0.7177
 10/145 [=>............................] - ETA: 0s - loss: 0.9945
 21/145 [===>..........................] - ETA: 0s - loss: 1.0305
 32/145 [=====>........................] - ETA: 0s - loss: 1.0724
 42/145 [=======>......................] - ETA: 0s - loss: 1.0901
 53/145 [=========>....................] - ETA: 0s - loss: 1.0879
 63/145 [============>.................] - ETA: 0s - loss: 1.0833
 74/145 [==============>...............] - ETA: 0s - loss: 1.0786
 84/145 [================>.............] - ETA: 0s - loss: 1.0907
 95/145 [==================>...........] - ETA: 0s - loss: 1.0924
106/145 [====================>.........] - ETA: 0s - loss: 1.0935
117/145 [=======================>......] - ETA: 0s - loss: 1.0880
128/145 [=========================>....] - ETA: 0s - loss: 1.0842
139/145 [===========================>..] - ETA: 0s - loss: 1.0795
145/145 [==============================] - 1s 5ms/step - loss: 1.0823

145/145 [==============================] - 1s 6ms/step - loss: 1.0823 - val_loss: 0.2235
Epoch 11/20

  1/145 [..............................] - ETA: 1s - loss: 0.9234
 10/145 [=>............................] - ETA: 0s - loss: 1.0393
 21/145 [===>..........................] - ETA: 0s - loss: 1.0970
 32/145 [=====>........................] - ETA: 0s - loss: 1.0911
 43/145 [=======>......................] - ETA: 0s - loss: 1.0702
 53/145 [=========>....................] - ETA: 0s - loss: 1.0555
 64/145 [============>.................] - ETA: 0s - loss: 1.0566
 75/145 [==============>...............] - ETA: 0s - loss: 1.0407
 86/145 [================>.............] - ETA: 0s - loss: 1.0333
 97/145 [===================>..........] - ETA: 0s - loss: 1.0310
108/145 [=====================>........] - ETA: 0s - loss: 1.0315
119/145 [=======================>......] - ETA: 0s - loss: 1.0294
130/145 [=========================>....] - ETA: 0s - loss: 1.0150
141/145 [============================>.] - ETA: 0s - loss: 1.0074
145/145 [==============================] - 1s 5ms/step - loss: 1.0092

145/145 [==============================] - 1s 6ms/step - loss: 1.0092 - val_loss: 0.1606
Epoch 12/20

  1/145 [..............................] - ETA: 1s - loss: 1.0959
 10/145 [=>............................] - ETA: 0s - loss: 1.0997
 20/145 [===>..........................] - ETA: 0s - loss: 1.1739
 31/145 [=====>........................] - ETA: 0s - loss: 1.1158
 40/145 [=======>......................] - ETA: 0s - loss: 1.1094
 51/145 [=========>....................] - ETA: 0s - loss: 1.0877
 61/145 [===========>..................] - ETA: 0s - loss: 1.0639
 72/145 [=============>................] - ETA: 0s - loss: 1.0319
 83/145 [================>.............] - ETA: 0s - loss: 1.0050
 94/145 [==================>...........] - ETA: 0s - loss: 0.9928
106/145 [====================>.........] - ETA: 0s - loss: 0.9815
117/145 [=======================>......] - ETA: 0s - loss: 0.9718
128/145 [=========================>....] - ETA: 0s - loss: 0.9592
134/145 [==========================>...] - ETA: 0s - loss: 0.9554
143/145 [============================>.] - ETA: 0s - loss: 0.9475
145/145 [==============================] - 1s 5ms/step - loss: 0.9474

145/145 [==============================] - 1s 6ms/step - loss: 0.9474 - val_loss: 0.1331
Epoch 13/20

  1/145 [..............................] - ETA: 1s - loss: 1.2933
 10/145 [=>............................] - ETA: 0s - loss: 0.9642
 21/145 [===>..........................] - ETA: 0s - loss: 1.0113
 32/145 [=====>........................] - ETA: 0s - loss: 0.9469
 43/145 [=======>......................] - ETA: 0s - loss: 0.9259
 54/145 [==========>...................] - ETA: 0s - loss: 0.9478
 65/145 [============>.................] - ETA: 0s - loss: 0.9404
 76/145 [==============>...............] - ETA: 0s - loss: 0.9475
 87/145 [=================>............] - ETA: 0s - loss: 0.9354
 98/145 [===================>..........] - ETA: 0s - loss: 0.9304
109/145 [=====================>........] - ETA: 0s - loss: 0.9263
120/145 [=======================>......] - ETA: 0s - loss: 0.9331
131/145 [==========================>...] - ETA: 0s - loss: 0.9292
142/145 [============================>.] - ETA: 0s - loss: 0.9305
145/145 [==============================] - 1s 5ms/step - loss: 0.9306

145/145 [==============================] - 1s 6ms/step - loss: 0.9306 - val_loss: 0.0494
Epoch 14/20

  1/145 [..............................] - ETA: 1s - loss: 1.3148
  9/145 [>.............................] - ETA: 0s - loss: 0.9497
 19/145 [==>...........................] - ETA: 0s - loss: 0.9796
 29/145 [=====>........................] - ETA: 0s - loss: 0.9708
 40/145 [=======>......................] - ETA: 0s - loss: 1.0004
 51/145 [=========>....................] - ETA: 0s - loss: 0.9955
 62/145 [===========>..................] - ETA: 0s - loss: 1.0031
 73/145 [==============>...............] - ETA: 0s - loss: 0.9821
 84/145 [================>.............] - ETA: 0s - loss: 1.0005
 95/145 [==================>...........] - ETA: 0s - loss: 0.9823
106/145 [====================>.........] - ETA: 0s - loss: 0.9760
117/145 [=======================>......] - ETA: 0s - loss: 0.9699
128/145 [=========================>....] - ETA: 0s - loss: 0.9751
139/145 [===========================>..] - ETA: 0s - loss: 0.9703
145/145 [==============================] - 1s 5ms/step - loss: 0.9719

145/145 [==============================] - 1s 6ms/step - loss: 0.9719 - val_loss: 0.0665
Epoch 15/20

  1/145 [..............................] - ETA: 0s - loss: 1.0839
 10/145 [=>............................] - ETA: 0s - loss: 0.9706
 21/145 [===>..........................] - ETA: 0s - loss: 0.9496
 32/145 [=====>........................] - ETA: 0s - loss: 0.9541
 43/145 [=======>......................] - ETA: 0s - loss: 1.0027
 54/145 [==========>...................] - ETA: 0s - loss: 1.0247
 65/145 [============>.................] - ETA: 0s - loss: 1.0258
 76/145 [==============>...............] - ETA: 0s - loss: 1.0422
 87/145 [=================>............] - ETA: 0s - loss: 1.0377
 97/145 [===================>..........] - ETA: 0s - loss: 1.0383
108/145 [=====================>........] - ETA: 0s - loss: 1.0199
119/145 [=======================>......] - ETA: 0s - loss: 1.0224
130/145 [=========================>....] - ETA: 0s - loss: 1.0136
141/145 [============================>.] - ETA: 0s - loss: 0.9983
145/145 [==============================] - 1s 5ms/step - loss: 1.0049

145/145 [==============================] - 1s 6ms/step - loss: 1.0049 - val_loss: 0.0665
Epoch 16/20

  1/145 [..............................] - ETA: 1s - loss: 0.7695
 10/145 [=>............................] - ETA: 0s - loss: 1.1915
 21/145 [===>..........................] - ETA: 0s - loss: 1.1848
 32/145 [=====>........................] - ETA: 0s - loss: 1.1462
 43/145 [=======>......................] - ETA: 0s - loss: 1.1450
 54/145 [==========>...................] - ETA: 0s - loss: 1.1031
 65/145 [============>.................] - ETA: 0s - loss: 1.0656
 76/145 [==============>...............] - ETA: 0s - loss: 1.0481
 87/145 [=================>............] - ETA: 0s - loss: 1.0312
 98/145 [===================>..........] - ETA: 0s - loss: 1.0389
109/145 [=====================>........] - ETA: 0s - loss: 1.0153
116/145 [=======================>......] - ETA: 0s - loss: 1.0050
126/145 [=========================>....] - ETA: 0s - loss: 1.0029
136/145 [===========================>..] - ETA: 0s - loss: 0.9983
145/145 [==============================] - 1s 5ms/step - loss: 0.9961

145/145 [==============================] - 1s 6ms/step - loss: 0.9961 - val_loss: 0.0411
Epoch 17/20

  1/145 [..............................] - ETA: 1s - loss: 0.4677
 10/145 [=>............................] - ETA: 0s - loss: 1.0510
 20/145 [===>..........................] - ETA: 0s - loss: 1.0894
 31/145 [=====>........................] - ETA: 0s - loss: 1.0569
 41/145 [=======>......................] - ETA: 0s - loss: 1.0424
 52/145 [=========>....................] - ETA: 0s - loss: 1.0108
 62/145 [===========>..................] - ETA: 0s - loss: 1.0083
 73/145 [==============>...............] - ETA: 0s - loss: 1.0259
 84/145 [================>.............] - ETA: 0s - loss: 1.0267
 90/145 [=================>............] - ETA: 0s - loss: 1.0323
 97/145 [===================>..........] - ETA: 0s - loss: 1.0229
106/145 [====================>.........] - ETA: 0s - loss: 1.0186
109/145 [=====================>........] - ETA: 0s - loss: 1.0208
116/145 [=======================>......] - ETA: 0s - loss: 1.0326
123/145 [========================>.....] - ETA: 0s - loss: 1.0285
128/145 [=========================>....] - ETA: 0s - loss: 1.0293
135/145 [==========================>...] - ETA: 0s - loss: 1.0256
141/145 [============================>.] - ETA: 0s - loss: 1.0257
145/145 [==============================] - 1s 7ms/step - loss: 1.0285

145/145 [==============================] - 1s 8ms/step - loss: 1.0285 - val_loss: 0.0414
Epoch 18/20

  1/145 [..............................] - ETA: 1s - loss: 1.1976
  3/145 [..............................] - ETA: 4s - loss: 1.2672
  7/145 [>.............................] - ETA: 3s - loss: 1.3874
 16/145 [==>...........................] - ETA: 1s - loss: 1.3621
 26/145 [====>.........................] - ETA: 1s - loss: 1.3231
 37/145 [======>.......................] - ETA: 0s - loss: 1.2577
 48/145 [========>.....................] - ETA: 0s - loss: 1.2372
 59/145 [===========>..................] - ETA: 0s - loss: 1.2117
 70/145 [=============>................] - ETA: 0s - loss: 1.2113
 81/145 [===============>..............] - ETA: 0s - loss: 1.1984
 91/145 [=================>............] - ETA: 0s - loss: 1.1845
102/145 [====================>.........] - ETA: 0s - loss: 1.1717
113/145 [======================>.......] - ETA: 0s - loss: 1.1626
124/145 [========================>.....] - ETA: 0s - loss: 1.1500
134/145 [==========================>...] - ETA: 0s - loss: 1.1410
144/145 [============================>.] - ETA: 0s - loss: 1.1510
145/145 [==============================] - 1s 6ms/step - loss: 1.1515

145/145 [==============================] - 1s 7ms/step - loss: 1.1515 - val_loss: 0.0455
Epoch 19/20

  1/145 [..............................] - ETA: 0s - loss: 1.8353
 10/145 [=>............................] - ETA: 0s - loss: 1.4278
 20/145 [===>..........................] - ETA: 0s - loss: 1.3412
 31/145 [=====>........................] - ETA: 0s - loss: 1.2940
 39/145 [=======>......................] - ETA: 0s - loss: 1.2782
 45/145 [========>.....................] - ETA: 0s - loss: 1.2622
 54/145 [==========>...................] - ETA: 0s - loss: 1.2228
 64/145 [============>.................] - ETA: 0s - loss: 1.1964
 69/145 [=============>................] - ETA: 0s - loss: 1.1846
 79/145 [===============>..............] - ETA: 0s - loss: 1.1518
 90/145 [=================>............] - ETA: 0s - loss: 1.1547
101/145 [===================>..........] - ETA: 0s - loss: 1.1493
111/145 [=====================>........] - ETA: 0s - loss: 1.1427
121/145 [========================>.....] - ETA: 0s - loss: 1.1469
131/145 [==========================>...] - ETA: 0s - loss: 1.1351
141/145 [============================>.] - ETA: 0s - loss: 1.1362
145/145 [==============================] - 1s 6ms/step - loss: 1.1385

145/145 [==============================] - 1s 7ms/step - loss: 1.1385 - val_loss: 0.0671
Epoch 20/20

  1/145 [..............................] - ETA: 0s - loss: 1.2627
 10/145 [=>............................] - ETA: 0s - loss: 2.4640
 20/145 [===>..........................] - ETA: 0s - loss: 2.0883
 31/145 [=====>........................] - ETA: 0s - loss: 1.8278
 42/145 [=======>......................] - ETA: 0s - loss: 1.6576
 53/145 [=========>....................] - ETA: 0s - loss: 1.5847
 64/145 [============>.................] - ETA: 0s - loss: 1.4868
 75/145 [==============>...............] - ETA: 0s - loss: 1.4397
 86/145 [================>.............] - ETA: 0s - loss: 1.3812
 97/145 [===================>..........] - ETA: 0s - loss: 1.3432
108/145 [=====================>........] - ETA: 0s - loss: 1.3251
118/145 [=======================>......] - ETA: 0s - loss: 1.3224
129/145 [=========================>....] - ETA: 0s - loss: 1.2958
140/145 [===========================>..] - ETA: 0s - loss: 1.2828
145/145 [==============================] - 1s 5ms/step - loss: 1.2791

145/145 [==============================] - 1s 6ms/step - loss: 1.2791 - val_loss: 0.1387
kerasPrediction<-model %>% predict(as.matrix(carbonTestingFinal))
2024-05-06 19:42:45.149805: I tensorflow/core/grappler/optimizers/custom_graph_optimizer_registry.cc:114] Plugin optimizer for device_type GPU is enabled.

 1/63 [..............................] - ETA: 3s
18/63 [=======>......................] - ETA: 0s
43/63 [===================>..........] - ETA: 0s
63/63 [==============================] - 0s 2ms/step

63/63 [==============================] - 0s 2ms/step
rmse=function(x,y){
  return((mean(x-y)^2)^0.5)
}

rmse(kerasPrediction,carbonTestLabels)
[1] 0.2827712
MAE(kerasPrediction,carbonTestLabels)
[1] 0.3099322
rsquared<-sum((kerasPrediction-carbonTestLabels)^2)/sum((carbonTestLabels-mean(carbonTestLabels))^2)
rsquared
[1] 0.7408867
library(tfruns)
runs<-tuning_run(
  "carbonEmission.R",
  flags=list(
    learning_rate=c(0.1,0.5,0.01,0.001),
    nodes1=c(8,16,32,64,128),
    nodes2=c(8,16,32,64,128),
    nodes3=c(8,16,32,64,128),
    batch_size=c(16,32,64,128),
    dropout=c(0.1,0.2,0.3,0.4,0.5),
    activation=c("relu")
  ),sample=0.001
)
10,000 total combinations of flags 
(sampled to 10 combinations)
y
Training run 1/10 (flags = list(0.01, 64, 128, 8, 16, 0.3, "relu")) 
Using run directory runs/2024-05-07T00-42-50Z

> FLAGS<- flags(
+   flag_numeric("nodes1", 32),
+   flag_numeric("nodes2", 32),
+   flag_numeric("nodes3", 32),
+   flag_numeric("batch_size",32),
+  .... [TRUNCATED] 

> model = keras_model_sequential()

> model %>%
+   layer_dense(units = FLAGS$nodes1, activation = FLAGS$activation, input_shape = dim(carbonTrainingFinal)[2]) %>%
+   layer_dropout(rate .... [TRUNCATED] 

> model %>% compile(
+   loss="mse",
+   optimizer=optimizer_adam(lr=FLAGS$learning_rate)
+ )
WARNING:absl:At this time, the v2.11+ optimizer `tf.keras.optimizers.Adam` runs slowly on M1/M2 Macs, please use the legacy Keras optimizer instead, located at `tf.keras.optimizers.legacy.Adam`.
WARNING:absl:`lr` is deprecated in Keras optimizer, please use `learning_rate` or use the legacy optimizer, e.g.,tf.keras.optimizers.legacy.Adam.
WARNING:absl:There is a known slowdown when using v2.11+ Keras optimizers on M1/M2 Macs. Falling back to the legacy Keras optimizer, i.e., `tf.keras.optimizers.legacy.Adam`.

> model %>%fit(  as.matrix(carbonTrainingFinal),
+                carbonTrainingLabels,
+                batch_size=FLAGS$batch_size,
+                .... [TRUNCATED] 
Epoch 1/20
2024-05-06 19:42:52.758485: I tensorflow/core/grappler/optimizers/custom_graph_optimizer_registry.cc:114] Plugin optimizer for device_type GPU is enabled.
WARNING:tensorflow:Callback method `on_train_batch_end` is slow compared to the batch time (batch time: 0.0060s vs `on_train_batch_end` time: 0.0102s). Check your callbacks.
WARNING:tensorflow:Callback method `on_train_batch_end` is slow compared to the batch time (batch time: 0.0060s vs `on_train_batch_end` time: 0.0102s). Check your callbacks.
2024-05-06 19:42:55.631617: I tensorflow/core/grappler/optimizers/custom_graph_optimizer_registry.cc:114] Plugin optimizer for device_type GPU is enabled.
451/451 - 5s - loss: 4.5205 - val_loss: 0.4005 - 5s/epoch - 11ms/step
Epoch 2/20
451/451 - 3s - loss: 0.4233 - val_loss: 0.4380 - 3s/epoch - 7ms/step
Epoch 3/20
451/451 - 4s - loss: 0.2020 - val_loss: 0.8268 - 4s/epoch - 10ms/step
Epoch 4/20
451/451 - 3s - loss: 0.1136 - val_loss: 0.8045 - 3s/epoch - 7ms/step
Epoch 5/20
451/451 - 3s - loss: 0.0774 - val_loss: 0.9548 - 3s/epoch - 8ms/step
Epoch 6/20
451/451 - 3s - loss: 0.0675 - val_loss: 0.8364 - 3s/epoch - 7ms/step
Epoch 7/20
451/451 - 3s - loss: 0.0749 - val_loss: 0.6173 - 3s/epoch - 7ms/step
Epoch 8/20
451/451 - 3s - loss: 0.1461 - val_loss: 0.5754 - 3s/epoch - 6ms/step
Epoch 9/20
451/451 - 4s - loss: 0.1720 - val_loss: 0.6730 - 4s/epoch - 9ms/step
Epoch 10/20
451/451 - 4s - loss: 0.1697 - val_loss: 1.0733 - 4s/epoch - 8ms/step
Epoch 11/20
451/451 - 3s - loss: 0.2268 - val_loss: 1.8652 - 3s/epoch - 7ms/step
Epoch 12/20
451/451 - 3s - loss: 0.2462 - val_loss: 3.5136 - 3s/epoch - 6ms/step
Epoch 13/20
451/451 - 3s - loss: 0.3646 - val_loss: 3.4387 - 3s/epoch - 7ms/step
Epoch 14/20
451/451 - 3s - loss: 0.2858 - val_loss: 5.3962 - 3s/epoch - 6ms/step
Epoch 15/20
451/451 - 3s - loss: 0.4880 - val_loss: 5.5874 - 3s/epoch - 6ms/step
Epoch 16/20
451/451 - 3s - loss: 0.2290 - val_loss: 6.1486 - 3s/epoch - 6ms/step
Epoch 17/20
451/451 - 3s - loss: 0.5407 - val_loss: 6.6616 - 3s/epoch - 6ms/step
Epoch 18/20
451/451 - 3s - loss: 0.2750 - val_loss: 5.1347 - 3s/epoch - 6ms/step
Epoch 19/20
451/451 - 3s - loss: 0.7823 - val_loss: 6.0686 - 3s/epoch - 7ms/step
Epoch 20/20
451/451 - 3s - loss: 0.2407 - val_loss: 6.4525 - 3s/epoch - 6ms/step

Run completed: runs/2024-05-07T00-42-50Z

Training run 2/10 (flags = list(0.1, 64, 32, 64, 32, 0.5, "relu")) 
Using run directory runs/2024-05-07T00-43-56Z

> FLAGS<- flags(
+   flag_numeric("nodes1", 32),
+   flag_numeric("nodes2", 32),
+   flag_numeric("nodes3", 32),
+   flag_numeric("batch_size",32),
+  .... [TRUNCATED] 

> model = keras_model_sequential()

> model %>%
+   layer_dense(units = FLAGS$nodes1, activation = FLAGS$activation, input_shape = dim(carbonTrainingFinal)[2]) %>%
+   layer_dropout(rate .... [TRUNCATED] 

> model %>% compile(
+   loss="mse",
+   optimizer=optimizer_adam(lr=FLAGS$learning_rate)
+ )
WARNING:absl:At this time, the v2.11+ optimizer `tf.keras.optimizers.Adam` runs slowly on M1/M2 Macs, please use the legacy Keras optimizer instead, located at `tf.keras.optimizers.legacy.Adam`.
WARNING:absl:`lr` is deprecated in Keras optimizer, please use `learning_rate` or use the legacy optimizer, e.g.,tf.keras.optimizers.legacy.Adam.
WARNING:absl:There is a known slowdown when using v2.11+ Keras optimizers on M1/M2 Macs. Falling back to the legacy Keras optimizer, i.e., `tf.keras.optimizers.legacy.Adam`.

> model %>%fit(  as.matrix(carbonTrainingFinal),
+                carbonTrainingLabels,
+                batch_size=FLAGS$batch_size,
+                .... [TRUNCATED] 
Epoch 1/20
2024-05-06 19:43:59.199273: I tensorflow/core/grappler/optimizers/custom_graph_optimizer_registry.cc:114] Plugin optimizer for device_type GPU is enabled.
WARNING:tensorflow:Callback method `on_train_batch_end` is slow compared to the batch time (batch time: 0.0057s vs `on_train_batch_end` time: 0.0093s). Check your callbacks.
WARNING:tensorflow:Callback method `on_train_batch_end` is slow compared to the batch time (batch time: 0.0057s vs `on_train_batch_end` time: 0.0093s). Check your callbacks.
2024-05-06 19:44:00.902248: I tensorflow/core/grappler/optimizers/custom_graph_optimizer_registry.cc:114] Plugin optimizer for device_type GPU is enabled.
226/226 - 5s - loss: 14.3461 - val_loss: 3.2779 - 5s/epoch - 23ms/step
Epoch 2/20
226/226 - 2s - loss: 2.3126 - val_loss: 2.0409 - 2s/epoch - 11ms/step
Epoch 3/20
226/226 - 2s - loss: 1.2771 - val_loss: 1.5950 - 2s/epoch - 8ms/step
Epoch 4/20
226/226 - 2s - loss: 0.8722 - val_loss: 1.2679 - 2s/epoch - 7ms/step
Epoch 5/20
226/226 - 2s - loss: 0.6546 - val_loss: 1.4245 - 2s/epoch - 9ms/step
Epoch 6/20
226/226 - 2s - loss: 0.5112 - val_loss: 1.6146 - 2s/epoch - 10ms/step
Epoch 7/20
226/226 - 2s - loss: 0.4576 - val_loss: 1.4389 - 2s/epoch - 8ms/step
Epoch 8/20
226/226 - 2s - loss: 0.4060 - val_loss: 1.8662 - 2s/epoch - 9ms/step
Epoch 9/20
226/226 - 2s - loss: 0.3691 - val_loss: 1.5593 - 2s/epoch - 9ms/step
Epoch 10/20
226/226 - 2s - loss: 0.3522 - val_loss: 1.6031 - 2s/epoch - 7ms/step
Epoch 11/20
226/226 - 2s - loss: 0.3336 - val_loss: 1.8204 - 2s/epoch - 7ms/step
Epoch 12/20
226/226 - 2s - loss: 0.3520 - val_loss: 2.7458 - 2s/epoch - 10ms/step
Epoch 13/20
226/226 - 2s - loss: 0.3466 - val_loss: 2.5845 - 2s/epoch - 7ms/step
Epoch 14/20
226/226 - 2s - loss: 0.5042 - val_loss: 2.2117 - 2s/epoch - 7ms/step
Epoch 15/20
226/226 - 2s - loss: 0.4170 - val_loss: 2.3097 - 2s/epoch - 9ms/step
Epoch 16/20
226/226 - 2s - loss: 0.7537 - val_loss: 1.5459 - 2s/epoch - 8ms/step
Epoch 17/20
226/226 - 2s - loss: 0.6456 - val_loss: 3.3727 - 2s/epoch - 8ms/step
Epoch 18/20
226/226 - 2s - loss: 0.8647 - val_loss: 2.2160 - 2s/epoch - 9ms/step
Epoch 19/20
226/226 - 2s - loss: 3.1060 - val_loss: 3.0374 - 2s/epoch - 7ms/step
Epoch 20/20
226/226 - 2s - loss: 1.0103 - val_loss: 1.9587 - 2s/epoch - 8ms/step

Run completed: runs/2024-05-07T00-43-56Z

Training run 3/10 (flags = list(0.01, 16, 32, 16, 32, 0.1, "relu")) 
Using run directory runs/2024-05-07T00-44-37Z

> FLAGS<- flags(
+   flag_numeric("nodes1", 32),
+   flag_numeric("nodes2", 32),
+   flag_numeric("nodes3", 32),
+   flag_numeric("batch_size",32),
+  .... [TRUNCATED] 

> model = keras_model_sequential()

> model %>%
+   layer_dense(units = FLAGS$nodes1, activation = FLAGS$activation, input_shape = dim(carbonTrainingFinal)[2]) %>%
+   layer_dropout(rate .... [TRUNCATED] 

> model %>% compile(
+   loss="mse",
+   optimizer=optimizer_adam(lr=FLAGS$learning_rate)
+ )
WARNING:absl:At this time, the v2.11+ optimizer `tf.keras.optimizers.Adam` runs slowly on M1/M2 Macs, please use the legacy Keras optimizer instead, located at `tf.keras.optimizers.legacy.Adam`.
WARNING:absl:`lr` is deprecated in Keras optimizer, please use `learning_rate` or use the legacy optimizer, e.g.,tf.keras.optimizers.legacy.Adam.
WARNING:absl:There is a known slowdown when using v2.11+ Keras optimizers on M1/M2 Macs. Falling back to the legacy Keras optimizer, i.e., `tf.keras.optimizers.legacy.Adam`.

> model %>%fit(  as.matrix(carbonTrainingFinal),
+                carbonTrainingLabels,
+                batch_size=FLAGS$batch_size,
+                .... [TRUNCATED] 
Epoch 1/20
2024-05-06 19:44:38.417896: I tensorflow/core/grappler/optimizers/custom_graph_optimizer_registry.cc:114] Plugin optimizer for device_type GPU is enabled.
WARNING:tensorflow:Callback method `on_train_batch_end` is slow compared to the batch time (batch time: 0.0062s vs `on_train_batch_end` time: 0.0115s). Check your callbacks.
WARNING:tensorflow:Callback method `on_train_batch_end` is slow compared to the batch time (batch time: 0.0062s vs `on_train_batch_end` time: 0.0115s). Check your callbacks.
2024-05-06 19:44:42.894193: I tensorflow/core/grappler/optimizers/custom_graph_optimizer_registry.cc:114] Plugin optimizer for device_type GPU is enabled.
226/226 - 5s - loss: 11.3299 - val_loss: 0.8747 - 5s/epoch - 24ms/step
Epoch 2/20
226/226 - 2s - loss: 2.9246 - val_loss: 0.6621 - 2s/epoch - 11ms/step
Epoch 3/20
226/226 - 2s - loss: 1.6391 - val_loss: 0.6023 - 2s/epoch - 10ms/step
Epoch 4/20
226/226 - 2s - loss: 0.9053 - val_loss: 0.7329 - 2s/epoch - 10ms/step
Epoch 5/20
226/226 - 2s - loss: 0.5261 - val_loss: 1.1103 - 2s/epoch - 10ms/step
Epoch 6/20
226/226 - 2s - loss: 0.3653 - val_loss: 1.1065 - 2s/epoch - 7ms/step
Epoch 7/20
226/226 - 2s - loss: 0.3033 - val_loss: 0.9268 - 2s/epoch - 8ms/step
Epoch 8/20
226/226 - 2s - loss: 0.2484 - val_loss: 1.1910 - 2s/epoch - 9ms/step
Epoch 9/20
226/226 - 2s - loss: 0.2362 - val_loss: 1.0927 - 2s/epoch - 9ms/step
Epoch 10/20
226/226 - 2s - loss: 0.2023 - val_loss: 1.1800 - 2s/epoch - 8ms/step
Epoch 11/20
226/226 - 2s - loss: 0.1806 - val_loss: 1.1348 - 2s/epoch - 9ms/step
Epoch 12/20
226/226 - 2s - loss: 0.1748 - val_loss: 0.8917 - 2s/epoch - 8ms/step
Epoch 13/20
226/226 - 2s - loss: 0.1582 - val_loss: 1.3242 - 2s/epoch - 7ms/step
Epoch 14/20
226/226 - 2s - loss: 0.1353 - val_loss: 0.9758 - 2s/epoch - 7ms/step
Epoch 15/20
226/226 - 2s - loss: 0.1355 - val_loss: 1.3543 - 2s/epoch - 8ms/step
Epoch 16/20
226/226 - 2s - loss: 0.1270 - val_loss: 1.8962 - 2s/epoch - 8ms/step
Epoch 17/20
226/226 - 2s - loss: 0.1327 - val_loss: 2.2965 - 2s/epoch - 8ms/step
Epoch 18/20
226/226 - 2s - loss: 0.1413 - val_loss: 1.6279 - 2s/epoch - 8ms/step
Epoch 19/20
226/226 - 2s - loss: 0.1550 - val_loss: 1.5583 - 2s/epoch - 7ms/step
Epoch 20/20
226/226 - 2s - loss: 0.1534 - val_loss: 1.1836 - 2s/epoch - 7ms/step

Run completed: runs/2024-05-07T00-44-37Z

Training run 4/10 (flags = list(0.01, 128, 8, 128, 128, 0.3, "relu")) 
Using run directory runs/2024-05-07T00-45-19Z

> FLAGS<- flags(
+   flag_numeric("nodes1", 32),
+   flag_numeric("nodes2", 32),
+   flag_numeric("nodes3", 32),
+   flag_numeric("batch_size",32),
+  .... [TRUNCATED] 

> model = keras_model_sequential()

> model %>%
+   layer_dense(units = FLAGS$nodes1, activation = FLAGS$activation, input_shape = dim(carbonTrainingFinal)[2]) %>%
+   layer_dropout(rate .... [TRUNCATED] 

> model %>% compile(
+   loss="mse",
+   optimizer=optimizer_adam(lr=FLAGS$learning_rate)
+ )
WARNING:absl:At this time, the v2.11+ optimizer `tf.keras.optimizers.Adam` runs slowly on M1/M2 Macs, please use the legacy Keras optimizer instead, located at `tf.keras.optimizers.legacy.Adam`.
WARNING:absl:`lr` is deprecated in Keras optimizer, please use `learning_rate` or use the legacy optimizer, e.g.,tf.keras.optimizers.legacy.Adam.
WARNING:absl:There is a known slowdown when using v2.11+ Keras optimizers on M1/M2 Macs. Falling back to the legacy Keras optimizer, i.e., `tf.keras.optimizers.legacy.Adam`.

> model %>%fit(  as.matrix(carbonTrainingFinal),
+                carbonTrainingLabels,
+                batch_size=FLAGS$batch_size,
+                .... [TRUNCATED] 
Epoch 1/20
2024-05-06 19:45:22.172741: I tensorflow/core/grappler/optimizers/custom_graph_optimizer_registry.cc:114] Plugin optimizer for device_type GPU is enabled.
WARNING:tensorflow:Callback method `on_train_batch_end` is slow compared to the batch time (batch time: 0.0060s vs `on_train_batch_end` time: 0.0094s). Check your callbacks.
WARNING:tensorflow:Callback method `on_train_batch_end` is slow compared to the batch time (batch time: 0.0060s vs `on_train_batch_end` time: 0.0094s). Check your callbacks.
2024-05-06 19:45:24.718914: I tensorflow/core/grappler/optimizers/custom_graph_optimizer_registry.cc:114] Plugin optimizer for device_type GPU is enabled.
57/57 - 3s - loss: 21.1251 - val_loss: 1.5118 - 3s/epoch - 59ms/step
Epoch 2/20
57/57 - 0s - loss: 6.1987 - val_loss: 0.4771 - 458ms/epoch - 8ms/step
Epoch 3/20
57/57 - 1s - loss: 4.4051 - val_loss: 0.2806 - 1s/epoch - 22ms/step
Epoch 4/20
57/57 - 0s - loss: 3.7106 - val_loss: 0.2391 - 442ms/epoch - 8ms/step
Epoch 5/20
57/57 - 1s - loss: 3.0778 - val_loss: 0.1993 - 1s/epoch - 18ms/step
Epoch 6/20
57/57 - 1s - loss: 2.5263 - val_loss: 0.1953 - 743ms/epoch - 13ms/step
Epoch 7/20
57/57 - 1s - loss: 1.7711 - val_loss: 0.1573 - 921ms/epoch - 16ms/step
Epoch 8/20
57/57 - 0s - loss: 0.9103 - val_loss: 0.1378 - 447ms/epoch - 8ms/step
Epoch 9/20
57/57 - 1s - loss: 0.4535 - val_loss: 0.1291 - 728ms/epoch - 13ms/step
Epoch 10/20
57/57 - 0s - loss: 0.2738 - val_loss: 0.1046 - 444ms/epoch - 8ms/step
Epoch 11/20
57/57 - 1s - loss: 0.2211 - val_loss: 0.1144 - 690ms/epoch - 12ms/step
Epoch 12/20
57/57 - 1s - loss: 0.1963 - val_loss: 0.0547 - 740ms/epoch - 13ms/step
Epoch 13/20
57/57 - 0s - loss: 0.1874 - val_loss: 0.0956 - 443ms/epoch - 8ms/step
Epoch 14/20
57/57 - 1s - loss: 0.1646 - val_loss: 0.0446 - 1s/epoch - 18ms/step
Epoch 15/20
57/57 - 1s - loss: 0.1588 - val_loss: 0.0288 - 962ms/epoch - 17ms/step
Epoch 16/20
57/57 - 1s - loss: 0.1845 - val_loss: 0.0203 - 729ms/epoch - 13ms/step
Epoch 17/20
57/57 - 0s - loss: 0.1786 - val_loss: 0.0361 - 458ms/epoch - 8ms/step
Epoch 18/20
57/57 - 1s - loss: 0.2073 - val_loss: 0.1647 - 676ms/epoch - 12ms/step
Epoch 19/20
57/57 - 1s - loss: 0.2029 - val_loss: 0.0998 - 721ms/epoch - 13ms/step
Epoch 20/20
57/57 - 0s - loss: 0.2267 - val_loss: 0.0275 - 439ms/epoch - 8ms/step

Run completed: runs/2024-05-07T00-45-19Z

Training run 5/10 (flags = list(0.01, 8, 32, 32, 16, 0.3, "relu")) 
Using run directory runs/2024-05-07T00-45-38Z

> FLAGS<- flags(
+   flag_numeric("nodes1", 32),
+   flag_numeric("nodes2", 32),
+   flag_numeric("nodes3", 32),
+   flag_numeric("batch_size",32),
+  .... [TRUNCATED] 

> model = keras_model_sequential()

> model %>%
+   layer_dense(units = FLAGS$nodes1, activation = FLAGS$activation, input_shape = dim(carbonTrainingFinal)[2]) %>%
+   layer_dropout(rate .... [TRUNCATED] 

> model %>% compile(
+   loss="mse",
+   optimizer=optimizer_adam(lr=FLAGS$learning_rate)
+ )
WARNING:absl:At this time, the v2.11+ optimizer `tf.keras.optimizers.Adam` runs slowly on M1/M2 Macs, please use the legacy Keras optimizer instead, located at `tf.keras.optimizers.legacy.Adam`.
WARNING:absl:`lr` is deprecated in Keras optimizer, please use `learning_rate` or use the legacy optimizer, e.g.,tf.keras.optimizers.legacy.Adam.
WARNING:absl:There is a known slowdown when using v2.11+ Keras optimizers on M1/M2 Macs. Falling back to the legacy Keras optimizer, i.e., `tf.keras.optimizers.legacy.Adam`.

> model %>%fit(  as.matrix(carbonTrainingFinal),
+                carbonTrainingLabels,
+                batch_size=FLAGS$batch_size,
+                .... [TRUNCATED] 
Epoch 1/20
2024-05-06 19:45:39.575838: I tensorflow/core/grappler/optimizers/custom_graph_optimizer_registry.cc:114] Plugin optimizer for device_type GPU is enabled.
WARNING:tensorflow:Callback method `on_train_batch_end` is slow compared to the batch time (batch time: 0.0061s vs `on_train_batch_end` time: 0.0103s). Check your callbacks.
WARNING:tensorflow:Callback method `on_train_batch_end` is slow compared to the batch time (batch time: 0.0061s vs `on_train_batch_end` time: 0.0103s). Check your callbacks.
2024-05-06 19:45:44.887951: I tensorflow/core/grappler/optimizers/custom_graph_optimizer_registry.cc:114] Plugin optimizer for device_type GPU is enabled.
451/451 - 7s - loss: 13.4441 - val_loss: 2.1683 - 7s/epoch - 16ms/step
Epoch 2/20
451/451 - 4s - loss: 1.7129 - val_loss: 1.7100 - 4s/epoch - 8ms/step
Epoch 3/20
451/451 - 3s - loss: 1.0867 - val_loss: 1.6948 - 3s/epoch - 6ms/step
Epoch 4/20
451/451 - 3s - loss: 0.8299 - val_loss: 0.8653 - 3s/epoch - 7ms/step
Epoch 5/20
451/451 - 3s - loss: 0.4791 - val_loss: 1.1193 - 3s/epoch - 6ms/step
Epoch 6/20
451/451 - 3s - loss: 0.4448 - val_loss: 0.5661 - 3s/epoch - 8ms/step
Epoch 7/20
451/451 - 3s - loss: 0.6548 - val_loss: 0.8835 - 3s/epoch - 7ms/step
Epoch 8/20
451/451 - 3s - loss: 1.0771 - val_loss: 1.3313 - 3s/epoch - 6ms/step
Epoch 9/20
451/451 - 3s - loss: 1.1615 - val_loss: 0.6273 - 3s/epoch - 7ms/step
Epoch 10/20
451/451 - 3s - loss: 2.2614 - val_loss: 8.0941 - 3s/epoch - 7ms/step
Epoch 11/20
451/451 - 3s - loss: 1.7852 - val_loss: 29.5401 - 3s/epoch - 7ms/step
Epoch 12/20
451/451 - 3s - loss: 1.7933 - val_loss: 72.5029 - 3s/epoch - 7ms/step
Epoch 13/20
451/451 - 3s - loss: 1.1154 - val_loss: 90.1913 - 3s/epoch - 7ms/step
Epoch 14/20
451/451 - 3s - loss: 1.0355 - val_loss: 124.6499 - 3s/epoch - 7ms/step
Epoch 15/20
451/451 - 3s - loss: 0.7773 - val_loss: 130.9595 - 3s/epoch - 7ms/step
Epoch 16/20
451/451 - 3s - loss: 0.6462 - val_loss: 146.5425 - 3s/epoch - 7ms/step
Epoch 17/20
451/451 - 3s - loss: 0.5830 - val_loss: 135.5791 - 3s/epoch - 7ms/step
Epoch 18/20
451/451 - 3s - loss: 0.8807 - val_loss: 116.1140 - 3s/epoch - 7ms/step
Epoch 19/20
451/451 - 3s - loss: 1.4421 - val_loss: 85.4320 - 3s/epoch - 7ms/step
Epoch 20/20
451/451 - 3s - loss: 1.6305 - val_loss: 68.1109 - 3s/epoch - 7ms/step

Run completed: runs/2024-05-07T00-45-38Z

Training run 6/10 (flags = list(0.1, 16, 128, 64, 16, 0.3, "relu")) 
Using run directory runs/2024-05-07T00-46-45Z

> FLAGS<- flags(
+   flag_numeric("nodes1", 32),
+   flag_numeric("nodes2", 32),
+   flag_numeric("nodes3", 32),
+   flag_numeric("batch_size",32),
+  .... [TRUNCATED] 

> model = keras_model_sequential()

> model %>%
+   layer_dense(units = FLAGS$nodes1, activation = FLAGS$activation, input_shape = dim(carbonTrainingFinal)[2]) %>%
+   layer_dropout(rate .... [TRUNCATED] 

> model %>% compile(
+   loss="mse",
+   optimizer=optimizer_adam(lr=FLAGS$learning_rate)
+ )
WARNING:absl:At this time, the v2.11+ optimizer `tf.keras.optimizers.Adam` runs slowly on M1/M2 Macs, please use the legacy Keras optimizer instead, located at `tf.keras.optimizers.legacy.Adam`.
WARNING:absl:`lr` is deprecated in Keras optimizer, please use `learning_rate` or use the legacy optimizer, e.g.,tf.keras.optimizers.legacy.Adam.
WARNING:absl:There is a known slowdown when using v2.11+ Keras optimizers on M1/M2 Macs. Falling back to the legacy Keras optimizer, i.e., `tf.keras.optimizers.legacy.Adam`.

> model %>%fit(  as.matrix(carbonTrainingFinal),
+                carbonTrainingLabels,
+                batch_size=FLAGS$batch_size,
+                .... [TRUNCATED] 
Epoch 1/20
2024-05-06 19:46:46.473419: I tensorflow/core/grappler/optimizers/custom_graph_optimizer_registry.cc:114] Plugin optimizer for device_type GPU is enabled.
WARNING:tensorflow:Callback method `on_train_batch_end` is slow compared to the batch time (batch time: 0.0067s vs `on_train_batch_end` time: 0.0103s). Check your callbacks.
WARNING:tensorflow:Callback method `on_train_batch_end` is slow compared to the batch time (batch time: 0.0067s vs `on_train_batch_end` time: 0.0103s). Check your callbacks.
2024-05-06 19:46:51.933581: I tensorflow/core/grappler/optimizers/custom_graph_optimizer_registry.cc:114] Plugin optimizer for device_type GPU is enabled.
451/451 - 7s - loss: 4.9497 - val_loss: 0.6491 - 7s/epoch - 15ms/step
Epoch 2/20
451/451 - 3s - loss: 1.1165 - val_loss: 1.0126 - 3s/epoch - 8ms/step
Epoch 3/20
451/451 - 3s - loss: 0.5470 - val_loss: 2.5428 - 3s/epoch - 7ms/step
Epoch 4/20
451/451 - 3s - loss: 0.4018 - val_loss: 3.2496 - 3s/epoch - 7ms/step
Epoch 5/20
451/451 - 3s - loss: 0.4970 - val_loss: 10.9413 - 3s/epoch - 7ms/step
Epoch 6/20
451/451 - 3s - loss: 0.6579 - val_loss: 27.3845 - 3s/epoch - 7ms/step
Epoch 7/20
451/451 - 3s - loss: 1.1860 - val_loss: 29.6661 - 3s/epoch - 7ms/step
Epoch 8/20
451/451 - 3s - loss: 1.0036 - val_loss: 40.4824 - 3s/epoch - 6ms/step
Epoch 9/20
451/451 - 3s - loss: 1.8350 - val_loss: 64.6819 - 3s/epoch - 6ms/step
Epoch 10/20
451/451 - 3s - loss: 2.5249 - val_loss: 84.0588 - 3s/epoch - 7ms/step
Epoch 11/20
451/451 - 3s - loss: 0.7133 - val_loss: 116.0839 - 3s/epoch - 6ms/step
Epoch 12/20
451/451 - 3s - loss: 1.6769 - val_loss: 146.3577 - 3s/epoch - 6ms/step
Epoch 13/20
451/451 - 3s - loss: 1.2271 - val_loss: 135.2326 - 3s/epoch - 6ms/step
Epoch 14/20
451/451 - 3s - loss: 4.7599 - val_loss: 44.5545 - 3s/epoch - 6ms/step
Epoch 15/20
451/451 - 3s - loss: 3.2621 - val_loss: 147.3202 - 3s/epoch - 6ms/step
Epoch 16/20
451/451 - 3s - loss: 1.4628 - val_loss: 192.1331 - 3s/epoch - 6ms/step
Epoch 17/20
451/451 - 3s - loss: 6.8850 - val_loss: 217.9560 - 3s/epoch - 6ms/step
Epoch 18/20
451/451 - 3s - loss: 0.6509 - val_loss: 241.0453 - 3s/epoch - 6ms/step
Epoch 19/20
451/451 - 3s - loss: 1.8630 - val_loss: 352.0300 - 3s/epoch - 6ms/step
Epoch 20/20
451/451 - 3s - loss: 2.8394 - val_loss: 425.2216 - 3s/epoch - 6ms/step

Run completed: runs/2024-05-07T00-46-45Z

Training run 7/10 (flags = list(0.1, 64, 32, 128, 64, 0.3, "relu")) 
Using run directory runs/2024-05-07T00-47-50Z

> FLAGS<- flags(
+   flag_numeric("nodes1", 32),
+   flag_numeric("nodes2", 32),
+   flag_numeric("nodes3", 32),
+   flag_numeric("batch_size",32),
+  .... [TRUNCATED] 

> model = keras_model_sequential()

> model %>%
+   layer_dense(units = FLAGS$nodes1, activation = FLAGS$activation, input_shape = dim(carbonTrainingFinal)[2]) %>%
+   layer_dropout(rate .... [TRUNCATED] 

> model %>% compile(
+   loss="mse",
+   optimizer=optimizer_adam(lr=FLAGS$learning_rate)
+ )
WARNING:absl:At this time, the v2.11+ optimizer `tf.keras.optimizers.Adam` runs slowly on M1/M2 Macs, please use the legacy Keras optimizer instead, located at `tf.keras.optimizers.legacy.Adam`.
WARNING:absl:`lr` is deprecated in Keras optimizer, please use `learning_rate` or use the legacy optimizer, e.g.,tf.keras.optimizers.legacy.Adam.
WARNING:absl:There is a known slowdown when using v2.11+ Keras optimizers on M1/M2 Macs. Falling back to the legacy Keras optimizer, i.e., `tf.keras.optimizers.legacy.Adam`.

> model %>%fit(  as.matrix(carbonTrainingFinal),
+                carbonTrainingLabels,
+                batch_size=FLAGS$batch_size,
+                .... [TRUNCATED] 
Epoch 1/20
2024-05-06 19:47:50.751923: I tensorflow/core/grappler/optimizers/custom_graph_optimizer_registry.cc:114] Plugin optimizer for device_type GPU is enabled.
WARNING:tensorflow:Callback method `on_train_batch_end` is slow compared to the batch time (batch time: 0.0065s vs `on_train_batch_end` time: 0.0105s). Check your callbacks.
WARNING:tensorflow:Callback method `on_train_batch_end` is slow compared to the batch time (batch time: 0.0065s vs `on_train_batch_end` time: 0.0105s). Check your callbacks.
2024-05-06 19:47:54.365512: I tensorflow/core/grappler/optimizers/custom_graph_optimizer_registry.cc:114] Plugin optimizer for device_type GPU is enabled.
113/113 - 5s - loss: 12.4502 - val_loss: 0.4850 - 5s/epoch - 42ms/step
Epoch 2/20
113/113 - 1s - loss: 2.6941 - val_loss: 0.2250 - 1s/epoch - 12ms/step
Epoch 3/20
113/113 - 1s - loss: 1.6491 - val_loss: 0.1194 - 1s/epoch - 9ms/step
Epoch 4/20
113/113 - 2s - loss: 1.1856 - val_loss: 0.1422 - 2s/epoch - 14ms/step
Epoch 5/20
113/113 - 1s - loss: 0.9046 - val_loss: 0.1761 - 1s/epoch - 12ms/step
Epoch 6/20
113/113 - 1s - loss: 0.7783 - val_loss: 0.2094 - 1s/epoch - 9ms/step
Epoch 7/20
113/113 - 1s - loss: 0.6274 - val_loss: 0.3228 - 1s/epoch - 12ms/step
Epoch 8/20
113/113 - 1s - loss: 0.5675 - val_loss: 0.4505 - 752ms/epoch - 7ms/step
Epoch 9/20
113/113 - 1s - loss: 0.4984 - val_loss: 0.3619 - 1s/epoch - 11ms/step
Epoch 10/20
113/113 - 1s - loss: 0.5197 - val_loss: 0.3234 - 1s/epoch - 11ms/step
Epoch 11/20
113/113 - 1s - loss: 0.4596 - val_loss: 0.6033 - 1s/epoch - 9ms/step
Epoch 12/20
113/113 - 1s - loss: 0.4506 - val_loss: 0.7246 - 1s/epoch - 9ms/step
Epoch 13/20
113/113 - 1s - loss: 0.4010 - val_loss: 0.5564 - 1s/epoch - 9ms/step
Epoch 14/20
113/113 - 1s - loss: 0.3933 - val_loss: 1.0406 - 1s/epoch - 9ms/step
Epoch 15/20
113/113 - 1s - loss: 0.3958 - val_loss: 0.8440 - 1s/epoch - 11ms/step
Epoch 16/20
113/113 - 1s - loss: 0.3832 - val_loss: 1.0860 - 1s/epoch - 10ms/step
Epoch 17/20
113/113 - 1s - loss: 0.3745 - val_loss: 1.7516 - 1s/epoch - 10ms/step
Epoch 18/20
113/113 - 1s - loss: 0.3701 - val_loss: 0.9606 - 1s/epoch - 12ms/step
Epoch 19/20
113/113 - 1s - loss: 0.3326 - val_loss: 1.1989 - 760ms/epoch - 7ms/step
Epoch 20/20
113/113 - 1s - loss: 0.5680 - val_loss: 1.1021 - 1s/epoch - 12ms/step

Run completed: runs/2024-05-07T00-47-50Z

Training run 8/10 (flags = list(0.001, 16, 32, 64, 128, 0.2, "relu")) 
Using run directory runs/2024-05-07T00-48-17Z

> FLAGS<- flags(
+   flag_numeric("nodes1", 32),
+   flag_numeric("nodes2", 32),
+   flag_numeric("nodes3", 32),
+   flag_numeric("batch_size",32),
+  .... [TRUNCATED] 

> model = keras_model_sequential()

> model %>%
+   layer_dense(units = FLAGS$nodes1, activation = FLAGS$activation, input_shape = dim(carbonTrainingFinal)[2]) %>%
+   layer_dropout(rate .... [TRUNCATED] 

> model %>% compile(
+   loss="mse",
+   optimizer=optimizer_adam(lr=FLAGS$learning_rate)
+ )
WARNING:absl:At this time, the v2.11+ optimizer `tf.keras.optimizers.Adam` runs slowly on M1/M2 Macs, please use the legacy Keras optimizer instead, located at `tf.keras.optimizers.legacy.Adam`.
WARNING:absl:`lr` is deprecated in Keras optimizer, please use `learning_rate` or use the legacy optimizer, e.g.,tf.keras.optimizers.legacy.Adam.
WARNING:absl:There is a known slowdown when using v2.11+ Keras optimizers on M1/M2 Macs. Falling back to the legacy Keras optimizer, i.e., `tf.keras.optimizers.legacy.Adam`.

> model %>%fit(  as.matrix(carbonTrainingFinal),
+                carbonTrainingLabels,
+                batch_size=FLAGS$batch_size,
+                .... [TRUNCATED] 
Epoch 1/20
2024-05-06 19:48:20.610911: I tensorflow/core/grappler/optimizers/custom_graph_optimizer_registry.cc:114] Plugin optimizer for device_type GPU is enabled.
WARNING:tensorflow:Callback method `on_train_batch_end` is slow compared to the batch time (batch time: 0.0069s vs `on_train_batch_end` time: 0.0627s). Check your callbacks.
WARNING:tensorflow:Callback method `on_train_batch_end` is slow compared to the batch time (batch time: 0.0069s vs `on_train_batch_end` time: 0.0627s). Check your callbacks.
2024-05-06 19:48:21.814358: I tensorflow/core/grappler/optimizers/custom_graph_optimizer_registry.cc:114] Plugin optimizer for device_type GPU is enabled.
57/57 - 5s - loss: 22.3041 - val_loss: 2.8608 - 5s/epoch - 83ms/step
Epoch 2/20
57/57 - 0s - loss: 5.5865 - val_loss: 1.0446 - 455ms/epoch - 8ms/step
Epoch 3/20
57/57 - 1s - loss: 4.1542 - val_loss: 0.6797 - 1s/epoch - 18ms/step
Epoch 4/20
57/57 - 0s - loss: 3.4006 - val_loss: 0.4687 - 447ms/epoch - 8ms/step
Epoch 5/20
57/57 - 0s - loss: 3.0850 - val_loss: 0.4654 - 461ms/epoch - 8ms/step
Epoch 6/20
57/57 - 1s - loss: 2.6984 - val_loss: 0.4543 - 736ms/epoch - 13ms/step
Epoch 7/20
57/57 - 1s - loss: 2.4992 - val_loss: 0.3528 - 1s/epoch - 19ms/step
Epoch 8/20
57/57 - 1s - loss: 2.3194 - val_loss: 0.2989 - 1s/epoch - 18ms/step
Epoch 9/20
57/57 - 1s - loss: 2.2032 - val_loss: 0.1999 - 765ms/epoch - 13ms/step
Epoch 10/20
57/57 - 0s - loss: 1.9163 - val_loss: 0.3407 - 449ms/epoch - 8ms/step
Epoch 11/20
57/57 - 1s - loss: 1.8562 - val_loss: 0.3737 - 737ms/epoch - 13ms/step
Epoch 12/20
57/57 - 1s - loss: 1.7622 - val_loss: 0.2780 - 735ms/epoch - 13ms/step
Epoch 13/20
57/57 - 0s - loss: 1.7513 - val_loss: 0.2511 - 457ms/epoch - 8ms/step
Epoch 14/20
57/57 - 1s - loss: 1.7762 - val_loss: 0.3243 - 757ms/epoch - 13ms/step
Epoch 15/20
57/57 - 1s - loss: 1.6476 - val_loss: 0.2067 - 928ms/epoch - 16ms/step
Epoch 16/20
57/57 - 1s - loss: 1.6757 - val_loss: 0.4440 - 1s/epoch - 18ms/step
Epoch 17/20
57/57 - 1s - loss: 1.6551 - val_loss: 0.4578 - 1s/epoch - 23ms/step
Epoch 18/20
57/57 - 0s - loss: 1.7856 - val_loss: 0.3688 - 449ms/epoch - 8ms/step
Epoch 19/20
57/57 - 0s - loss: 1.6913 - val_loss: 0.2022 - 460ms/epoch - 8ms/step
Epoch 20/20
57/57 - 1s - loss: 1.7090 - val_loss: 0.3874 - 1s/epoch - 18ms/step

Run completed: runs/2024-05-07T00-48-17Z

Training run 9/10 (flags = list(0.1, 32, 16, 64, 64, 0.1, "relu")) 
Using run directory runs/2024-05-07T00-48-37Z

> FLAGS<- flags(
+   flag_numeric("nodes1", 32),
+   flag_numeric("nodes2", 32),
+   flag_numeric("nodes3", 32),
+   flag_numeric("batch_size",32),
+  .... [TRUNCATED] 

> model = keras_model_sequential()

> model %>%
+   layer_dense(units = FLAGS$nodes1, activation = FLAGS$activation, input_shape = dim(carbonTrainingFinal)[2]) %>%
+   layer_dropout(rate .... [TRUNCATED] 

> model %>% compile(
+   loss="mse",
+   optimizer=optimizer_adam(lr=FLAGS$learning_rate)
+ )
WARNING:absl:At this time, the v2.11+ optimizer `tf.keras.optimizers.Adam` runs slowly on M1/M2 Macs, please use the legacy Keras optimizer instead, located at `tf.keras.optimizers.legacy.Adam`.
WARNING:absl:`lr` is deprecated in Keras optimizer, please use `learning_rate` or use the legacy optimizer, e.g.,tf.keras.optimizers.legacy.Adam.
WARNING:absl:There is a known slowdown when using v2.11+ Keras optimizers on M1/M2 Macs. Falling back to the legacy Keras optimizer, i.e., `tf.keras.optimizers.legacy.Adam`.

> model %>%fit(  as.matrix(carbonTrainingFinal),
+                carbonTrainingLabels,
+                batch_size=FLAGS$batch_size,
+                .... [TRUNCATED] 
Epoch 1/20
2024-05-06 19:48:39.568393: I tensorflow/core/grappler/optimizers/custom_graph_optimizer_registry.cc:114] Plugin optimizer for device_type GPU is enabled.
WARNING:tensorflow:Callback method `on_train_batch_end` is slow compared to the batch time (batch time: 0.0059s vs `on_train_batch_end` time: 0.0098s). Check your callbacks.
WARNING:tensorflow:Callback method `on_train_batch_end` is slow compared to the batch time (batch time: 0.0059s vs `on_train_batch_end` time: 0.0098s). Check your callbacks.
2024-05-06 19:48:41.908953: I tensorflow/core/grappler/optimizers/custom_graph_optimizer_registry.cc:114] Plugin optimizer for device_type GPU is enabled.
113/113 - 3s - loss: 20.2791 - val_loss: 0.9935 - 3s/epoch - 28ms/step
Epoch 2/20
113/113 - 2s - loss: 3.5698 - val_loss: 0.3433 - 2s/epoch - 15ms/step
Epoch 3/20
113/113 - 1s - loss: 2.7130 - val_loss: 0.2677 - 773ms/epoch - 7ms/step
Epoch 4/20
113/113 - 1s - loss: 2.1445 - val_loss: 0.1247 - 1s/epoch - 9ms/step
Epoch 5/20
113/113 - 1s - loss: 1.7315 - val_loss: 0.2425 - 759ms/epoch - 7ms/step
Epoch 6/20
113/113 - 2s - loss: 1.4882 - val_loss: 0.1598 - 2s/epoch - 15ms/step
Epoch 7/20
113/113 - 1s - loss: 1.3584 - val_loss: 0.1821 - 1s/epoch - 10ms/step
Epoch 8/20
113/113 - 1s - loss: 1.2617 - val_loss: 0.1311 - 1s/epoch - 12ms/step
Epoch 9/20
113/113 - 1s - loss: 1.1346 - val_loss: 0.1783 - 745ms/epoch - 7ms/step
Epoch 10/20
113/113 - 1s - loss: 1.0242 - val_loss: 0.1701 - 1s/epoch - 9ms/step
Epoch 11/20
113/113 - 2s - loss: 0.9074 - val_loss: 0.1820 - 2s/epoch - 19ms/step
Epoch 12/20
113/113 - 1s - loss: 0.8492 - val_loss: 0.1924 - 768ms/epoch - 7ms/step
Epoch 13/20
113/113 - 2s - loss: 0.9142 - val_loss: 0.1555 - 2s/epoch - 14ms/step
Epoch 14/20
113/113 - 1s - loss: 0.9461 - val_loss: 0.2216 - 742ms/epoch - 7ms/step
Epoch 15/20
113/113 - 1s - loss: 0.9353 - val_loss: 0.2312 - 1s/epoch - 9ms/step
Epoch 16/20
113/113 - 1s - loss: 0.8900 - val_loss: 0.2293 - 738ms/epoch - 7ms/step
Epoch 17/20
113/113 - 1s - loss: 0.8228 - val_loss: 0.2521 - 1s/epoch - 9ms/step
Epoch 18/20
113/113 - 2s - loss: 0.7746 - val_loss: 0.3063 - 2s/epoch - 14ms/step
Epoch 19/20
113/113 - 1s - loss: 0.6999 - val_loss: 0.6246 - 925ms/epoch - 8ms/step
Epoch 20/20
113/113 - 1s - loss: 0.7131 - val_loss: 0.3272 - 775ms/epoch - 7ms/step

Run completed: runs/2024-05-07T00-48-37Z

Training run 10/10 (flags = list(0.5, 64, 128, 128, 64, 0.4, "relu")) 
Using run directory runs/2024-05-07T00-49-04Z

> FLAGS<- flags(
+   flag_numeric("nodes1", 32),
+   flag_numeric("nodes2", 32),
+   flag_numeric("nodes3", 32),
+   flag_numeric("batch_size",32),
+  .... [TRUNCATED] 

> model = keras_model_sequential()

> model %>%
+   layer_dense(units = FLAGS$nodes1, activation = FLAGS$activation, input_shape = dim(carbonTrainingFinal)[2]) %>%
+   layer_dropout(rate .... [TRUNCATED] 

> model %>% compile(
+   loss="mse",
+   optimizer=optimizer_adam(lr=FLAGS$learning_rate)
+ )
WARNING:absl:At this time, the v2.11+ optimizer `tf.keras.optimizers.Adam` runs slowly on M1/M2 Macs, please use the legacy Keras optimizer instead, located at `tf.keras.optimizers.legacy.Adam`.
WARNING:absl:`lr` is deprecated in Keras optimizer, please use `learning_rate` or use the legacy optimizer, e.g.,tf.keras.optimizers.legacy.Adam.
WARNING:absl:There is a known slowdown when using v2.11+ Keras optimizers on M1/M2 Macs. Falling back to the legacy Keras optimizer, i.e., `tf.keras.optimizers.legacy.Adam`.

> model %>%fit(  as.matrix(carbonTrainingFinal),
+                carbonTrainingLabels,
+                batch_size=FLAGS$batch_size,
+                .... [TRUNCATED] 
Epoch 1/20
2024-05-06 19:49:04.833257: I tensorflow/core/grappler/optimizers/custom_graph_optimizer_registry.cc:114] Plugin optimizer for device_type GPU is enabled.
WARNING:tensorflow:Callback method `on_train_batch_end` is slow compared to the batch time (batch time: 0.0076s vs `on_train_batch_end` time: 0.0106s). Check your callbacks.
WARNING:tensorflow:Callback method `on_train_batch_end` is slow compared to the batch time (batch time: 0.0076s vs `on_train_batch_end` time: 0.0106s). Check your callbacks.
2024-05-06 19:49:08.310975: I tensorflow/core/grappler/optimizers/custom_graph_optimizer_registry.cc:114] Plugin optimizer for device_type GPU is enabled.
113/113 - 5s - loss: 10.5479 - val_loss: 1.4272 - 5s/epoch - 41ms/step
Epoch 2/20
113/113 - 1s - loss: 2.3952 - val_loss: 1.1559 - 1s/epoch - 13ms/step
Epoch 3/20
113/113 - 1s - loss: 1.5941 - val_loss: 1.0406 - 765ms/epoch - 7ms/step
Epoch 4/20
113/113 - 1s - loss: 1.2588 - val_loss: 1.2104 - 1s/epoch - 12ms/step
Epoch 5/20
113/113 - 1s - loss: 1.1194 - val_loss: 1.2557 - 807ms/epoch - 7ms/step
Epoch 6/20
113/113 - 2s - loss: 0.9635 - val_loss: 0.8202 - 2s/epoch - 16ms/step
Epoch 7/20
113/113 - 1s - loss: 0.7808 - val_loss: 1.2612 - 1s/epoch - 12ms/step
Epoch 8/20
113/113 - 1s - loss: 0.6893 - val_loss: 0.9310 - 792ms/epoch - 7ms/step
Epoch 9/20
113/113 - 1s - loss: 0.8263 - val_loss: 0.8854 - 1s/epoch - 12ms/step
Epoch 10/20
113/113 - 1s - loss: 0.8245 - val_loss: 1.9842 - 773ms/epoch - 7ms/step
Epoch 11/20
113/113 - 2s - loss: 0.6097 - val_loss: 1.6098 - 2s/epoch - 14ms/step
Epoch 12/20
113/113 - 1s - loss: 0.6071 - val_loss: 0.5681 - 1s/epoch - 12ms/step
Epoch 13/20
113/113 - 2s - loss: 1.0043 - val_loss: 2.7418 - 2s/epoch - 13ms/step
Epoch 14/20
113/113 - 1s - loss: 0.9712 - val_loss: 1.6307 - 756ms/epoch - 7ms/step
Epoch 15/20
113/113 - 1s - loss: 1.3167 - val_loss: 6.5994 - 1s/epoch - 12ms/step
Epoch 16/20
113/113 - 1s - loss: 1.0642 - val_loss: 3.7203 - 752ms/epoch - 7ms/step
Epoch 17/20
113/113 - 1s - loss: 1.9458 - val_loss: 3.8968 - 1s/epoch - 9ms/step
Epoch 18/20
113/113 - 1s - loss: 1.0726 - val_loss: 5.1104 - 741ms/epoch - 7ms/step
Epoch 19/20
113/113 - 1s - loss: 1.5860 - val_loss: 3.0719 - 1s/epoch - 9ms/step
Epoch 20/20
113/113 - 1s - loss: 1.7421 - val_loss: 6.8720 - 762ms/epoch - 7ms/step

Run completed: runs/2024-05-07T00-49-04Z

Runs

runs=runs[order(runs$metric_val_loss),]
runs
Data frame: 10 x 25 
# ... with 22 more columns:
#   flag_nodes1, flag_nodes2, flag_nodes3, flag_batch_size, flag_activation, flag_learning_rate, flag_dropout, epochs, epochs_completed, metrics,
#   model, loss_function, optimizer, learning_rate, script, start, end, completed, output, source_code, context, type
view_run(runs$run_dir[1])
Warning: incomplete final line found on '/var/folders/lw/zymjkl5d1g34b21y_8l475p80000gn/T//RtmpFb0Bja/file719e16574c3f/source/carbonEmission.R'Warning: incomplete final line found on '/var/folders/lw/zymjkl5d1g34b21y_8l475p80000gn/T//RtmpFb0Bja/file719e16574c3f/source/CarbonEmission.R'
dim(carbonTrainingFinal)
[1] 7202   75
dim(carbonValidationFinal)
[1] 799  75
carbonTrainingFinal<-rbind(carbonTrainingFinal,carbonValidationFinal)
carbonTrainingLabels<-c(carbonTrainingLabels,carbonValidationLabels)
dim(carbonTrainingFinal)
[1] 8001   75
BestModel<-keras_model_sequential()%>%
  layer_dense(units = 128,activation = "relu",input_shape = dim(carbonTrainingFinal)[2])%>%
  layer_dropout(rate=0.3)%>%
  layer_dense(units = 8,activation = "relu")%>%
  layer_dropout(rate=0.3)%>%
  layer_dense(units = 128,activation = "relu")%>%
  layer_dropout(rate=0.3)%>%
  layer_dense(units = 1)

BestModel %>% compile(
  loss="mse",
  optimizer=optimizer_adam(lr=0.001)
)
WARNING:absl:At this time, the v2.11+ optimizer `tf.keras.optimizers.Adam` runs slowly on M1/M2 Macs, please use the legacy Keras optimizer instead, located at `tf.keras.optimizers.legacy.Adam`.
WARNING:absl:`lr` is deprecated in Keras optimizer, please use `learning_rate` or use the legacy optimizer, e.g.,tf.keras.optimizers.legacy.Adam.
WARNING:absl:There is a known slowdown when using v2.11+ Keras optimizers on M1/M2 Macs. Falling back to the legacy Keras optimizer, i.e., `tf.keras.optimizers.legacy.Adam`.
history<-BestModel %>% fit(as.matrix(carbonTrainingFinal),
                       carbonTrainingLabels,
                       batch_size=128,
                       epochs=20,
                       validation_data=list(as.matrix(carbonTestingFinal),carbonTestingLabels)
                         )
Epoch 1/20
2024-05-06 19:50:10.098149: I tensorflow/core/grappler/optimizers/custom_graph_optimizer_registry.cc:114] Plugin optimizer for device_type GPU is enabled.

 1/63 [..............................] - ETA: 32s - loss: 56.1525
 7/63 [==>...........................] - ETA: 0s - loss: 50.6973 
14/63 [=====>........................] - ETA: 0s - loss: 43.2697
24/63 [==========>...................] - ETA: 0s - loss: 32.1539
31/63 [=============>................] - ETA: 0s - loss: 27.5672
39/63 [=================>............] - ETA: 0s - loss: 23.9154
49/63 [======================>.......] - ETA: 0s - loss: 20.7494
58/63 [==========================>...] - ETA: 0s - loss: 18.6393
63/63 [==============================] - 1s 9ms/step - loss: 17.7316
2024-05-06 19:50:10.989452: I tensorflow/core/grappler/optimizers/custom_graph_optimizer_registry.cc:114] Plugin optimizer for device_type GPU is enabled.

63/63 [==============================] - 2s 20ms/step - loss: 17.7316 - val_loss: 0.3594
Epoch 2/20

 1/63 [..............................] - ETA: 0s - loss: 6.0446
 8/63 [==>...........................] - ETA: 0s - loss: 6.1043
18/63 [=======>......................] - ETA: 0s - loss: 5.9030
26/63 [===========>..................] - ETA: 0s - loss: 5.7035
33/63 [==============>...............] - ETA: 0s - loss: 5.5879
41/63 [==================>...........] - ETA: 0s - loss: 5.4225
50/63 [======================>.......] - ETA: 0s - loss: 5.2888
60/63 [===========================>..] - ETA: 0s - loss: 5.1492
63/63 [==============================] - 0s 6ms/step - loss: 5.1037

63/63 [==============================] - 1s 9ms/step - loss: 5.1037 - val_loss: 0.4893
Epoch 3/20

 1/63 [..............................] - ETA: 0s - loss: 3.7968
 6/63 [=>............................] - ETA: 0s - loss: 4.2288
14/63 [=====>........................] - ETA: 0s - loss: 4.3073
23/63 [=========>....................] - ETA: 0s - loss: 4.1040
32/63 [==============>...............] - ETA: 0s - loss: 4.0073
42/63 [===================>..........] - ETA: 0s - loss: 3.9244
51/63 [=======================>......] - ETA: 0s - loss: 3.9132
60/63 [===========================>..] - ETA: 0s - loss: 3.8433
63/63 [==============================] - 0s 6ms/step - loss: 3.8226

63/63 [==============================] - 1s 9ms/step - loss: 3.8226 - val_loss: 0.3493
Epoch 4/20

 1/63 [..............................] - ETA: 0s - loss: 3.3050
 7/63 [==>...........................] - ETA: 0s - loss: 3.3747
16/63 [======>.......................] - ETA: 0s - loss: 3.3454
24/63 [==========>...................] - ETA: 0s - loss: 3.2853
34/63 [===============>..............] - ETA: 0s - loss: 3.2063
42/63 [===================>..........] - ETA: 0s - loss: 3.1480
49/63 [======================>.......] - ETA: 0s - loss: 3.0933
58/63 [==========================>...] - ETA: 0s - loss: 3.0464
63/63 [==============================] - 0s 6ms/step - loss: 3.0180

63/63 [==============================] - 1s 9ms/step - loss: 3.0180 - val_loss: 0.3182
Epoch 5/20

 1/63 [..............................] - ETA: 0s - loss: 2.6332
 8/63 [==>...........................] - ETA: 0s - loss: 2.6578
17/63 [=======>......................] - ETA: 0s - loss: 2.6075
27/63 [===========>..................] - ETA: 0s - loss: 2.5429
37/63 [================>.............] - ETA: 0s - loss: 2.4747
47/63 [=====================>........] - ETA: 0s - loss: 2.4145
57/63 [==========================>...] - ETA: 0s - loss: 2.3543
63/63 [==============================] - 0s 6ms/step - loss: 2.3245

63/63 [==============================] - 1s 8ms/step - loss: 2.3245 - val_loss: 0.2665
Epoch 6/20

 1/63 [..............................] - ETA: 0s - loss: 2.1989
 8/63 [==>...........................] - ETA: 0s - loss: 1.9025
18/63 [=======>......................] - ETA: 0s - loss: 1.8597
27/63 [===========>..................] - ETA: 0s - loss: 1.7908
37/63 [================>.............] - ETA: 0s - loss: 1.7341
47/63 [=====================>........] - ETA: 0s - loss: 1.6634
57/63 [==========================>...] - ETA: 0s - loss: 1.6058
63/63 [==============================] - 0s 6ms/step - loss: 1.5699

63/63 [==============================] - 1s 8ms/step - loss: 1.5699 - val_loss: 0.3017
Epoch 7/20

 1/63 [..............................] - ETA: 0s - loss: 1.2788
 9/63 [===>..........................] - ETA: 0s - loss: 1.2298
18/63 [=======>......................] - ETA: 0s - loss: 1.2152
28/63 [============>.................] - ETA: 0s - loss: 1.1812
38/63 [=================>............] - ETA: 0s - loss: 1.1151
48/63 [=====================>........] - ETA: 0s - loss: 1.0783
58/63 [==========================>...] - ETA: 0s - loss: 1.0496
63/63 [==============================] - 0s 6ms/step - loss: 1.0329

63/63 [==============================] - 1s 8ms/step - loss: 1.0329 - val_loss: 0.4110
Epoch 8/20

 1/63 [..............................] - ETA: 0s - loss: 0.9057
 8/63 [==>...........................] - ETA: 0s - loss: 0.8505
18/63 [=======>......................] - ETA: 0s - loss: 0.7783
27/63 [===========>..................] - ETA: 0s - loss: 0.7639
36/63 [================>.............] - ETA: 0s - loss: 0.7573
46/63 [====================>.........] - ETA: 0s - loss: 0.7394
55/63 [=========================>....] - ETA: 0s - loss: 0.7227
63/63 [==============================] - 0s 6ms/step - loss: 0.7049

63/63 [==============================] - 1s 9ms/step - loss: 0.7049 - val_loss: 0.3024
Epoch 9/20

 1/63 [..............................] - ETA: 0s - loss: 0.6317
 8/63 [==>...........................] - ETA: 0s - loss: 0.6003
18/63 [=======>......................] - ETA: 0s - loss: 0.6017
28/63 [============>.................] - ETA: 0s - loss: 0.6022
38/63 [=================>............] - ETA: 0s - loss: 0.5921
48/63 [=====================>........] - ETA: 0s - loss: 0.5811
58/63 [==========================>...] - ETA: 0s - loss: 0.5761
63/63 [==============================] - 0s 6ms/step - loss: 0.5716

63/63 [==============================] - 1s 9ms/step - loss: 0.5716 - val_loss: 0.4165
Epoch 10/20

 1/63 [..............................] - ETA: 0s - loss: 0.4257
 7/63 [==>...........................] - ETA: 0s - loss: 0.5176
16/63 [======>.......................] - ETA: 0s - loss: 0.5310
26/63 [===========>..................] - ETA: 0s - loss: 0.5257
35/63 [===============>..............] - ETA: 0s - loss: 0.5265
45/63 [====================>.........] - ETA: 0s - loss: 0.5175
54/63 [========================>.....] - ETA: 0s - loss: 0.5178
62/63 [============================>.] - ETA: 0s - loss: 0.5170
63/63 [==============================] - 0s 6ms/step - loss: 0.5155

63/63 [==============================] - 1s 9ms/step - loss: 0.5155 - val_loss: 0.4760
Epoch 11/20

 1/63 [..............................] - ETA: 0s - loss: 0.5481
 8/63 [==>...........................] - ETA: 0s - loss: 0.5009
18/63 [=======>......................] - ETA: 0s - loss: 0.4864
28/63 [============>.................] - ETA: 0s - loss: 0.4712
37/63 [================>.............] - ETA: 0s - loss: 0.4750
47/63 [=====================>........] - ETA: 0s - loss: 0.4799
58/63 [==========================>...] - ETA: 0s - loss: 0.4830
63/63 [==============================] - 0s 6ms/step - loss: 0.4824

63/63 [==============================] - 1s 8ms/step - loss: 0.4824 - val_loss: 0.4617
Epoch 12/20

 1/63 [..............................] - ETA: 0s - loss: 0.4498
 8/63 [==>...........................] - ETA: 0s - loss: 0.4512
18/63 [=======>......................] - ETA: 0s - loss: 0.4672
27/63 [===========>..................] - ETA: 0s - loss: 0.4586
37/63 [================>.............] - ETA: 0s - loss: 0.4754
46/63 [====================>.........] - ETA: 0s - loss: 0.4858
54/63 [========================>.....] - ETA: 0s - loss: 0.4884
63/63 [==============================] - 0s 6ms/step - loss: 0.4905

63/63 [==============================] - 1s 9ms/step - loss: 0.4905 - val_loss: 0.4083
Epoch 13/20

 1/63 [..............................] - ETA: 0s - loss: 0.4547
 8/63 [==>...........................] - ETA: 0s - loss: 0.4782
17/63 [=======>......................] - ETA: 0s - loss: 0.4875
27/63 [===========>..................] - ETA: 0s - loss: 0.4907
37/63 [================>.............] - ETA: 0s - loss: 0.4954
45/63 [====================>.........] - ETA: 0s - loss: 0.4970
55/63 [=========================>....] - ETA: 0s - loss: 0.5106
63/63 [==============================] - 0s 6ms/step - loss: 0.5178

63/63 [==============================] - 1s 9ms/step - loss: 0.5178 - val_loss: 0.3120
Epoch 14/20

 1/63 [..............................] - ETA: 0s - loss: 0.4834
 9/63 [===>..........................] - ETA: 0s - loss: 0.6559
19/63 [========>.....................] - ETA: 0s - loss: 0.6414
29/63 [============>.................] - ETA: 0s - loss: 0.6487
39/63 [=================>............] - ETA: 0s - loss: 0.6533
49/63 [======================>.......] - ETA: 0s - loss: 0.6356
59/63 [===========================>..] - ETA: 0s - loss: 0.6320
63/63 [==============================] - 0s 5ms/step - loss: 0.6281

63/63 [==============================] - 1s 8ms/step - loss: 0.6281 - val_loss: 0.5378
Epoch 15/20

 1/63 [..............................] - ETA: 0s - loss: 0.6601
 9/63 [===>..........................] - ETA: 0s - loss: 0.6684
17/63 [=======>......................] - ETA: 0s - loss: 0.7051
27/63 [===========>..................] - ETA: 0s - loss: 0.6947
37/63 [================>.............] - ETA: 0s - loss: 0.6975
47/63 [=====================>........] - ETA: 0s - loss: 0.7048
56/63 [=========================>....] - ETA: 0s - loss: 0.7048
63/63 [==============================] - 0s 6ms/step - loss: 0.7108

63/63 [==============================] - 1s 8ms/step - loss: 0.7108 - val_loss: 0.2581
Epoch 16/20

 1/63 [..............................] - ETA: 0s - loss: 0.7801
 8/63 [==>...........................] - ETA: 0s - loss: 0.6919
18/63 [=======>......................] - ETA: 0s - loss: 0.7125
27/63 [===========>..................] - ETA: 0s - loss: 0.7137
37/63 [================>.............] - ETA: 0s - loss: 0.7245
47/63 [=====================>........] - ETA: 0s - loss: 0.7272
56/63 [=========================>....] - ETA: 0s - loss: 0.7218
63/63 [==============================] - 0s 6ms/step - loss: 0.7238

63/63 [==============================] - 1s 8ms/step - loss: 0.7238 - val_loss: 0.7199
Epoch 17/20

 1/63 [..............................] - ETA: 0s - loss: 0.6796
 9/63 [===>..........................] - ETA: 0s - loss: 0.6985
19/63 [========>.....................] - ETA: 0s - loss: 0.7236
28/63 [============>.................] - ETA: 0s - loss: 0.7255
38/63 [=================>............] - ETA: 0s - loss: 0.7247
48/63 [=====================>........] - ETA: 0s - loss: 0.7379
58/63 [==========================>...] - ETA: 0s - loss: 0.7495
63/63 [==============================] - 0s 6ms/step - loss: 0.7535

63/63 [==============================] - 1s 8ms/step - loss: 0.7535 - val_loss: 0.7899
Epoch 18/20

 1/63 [..............................] - ETA: 0s - loss: 0.7956
 9/63 [===>..........................] - ETA: 0s - loss: 0.8208
19/63 [========>.....................] - ETA: 0s - loss: 0.8132
29/63 [============>.................] - ETA: 0s - loss: 0.8473
39/63 [=================>............] - ETA: 0s - loss: 0.8946
49/63 [======================>.......] - ETA: 0s - loss: 0.9325
59/63 [===========================>..] - ETA: 0s - loss: 0.9543
63/63 [==============================] - 0s 6ms/step - loss: 0.9532

63/63 [==============================] - 1s 8ms/step - loss: 0.9532 - val_loss: 1.1574
Epoch 19/20

 1/63 [..............................] - ETA: 0s - loss: 0.9332
 9/63 [===>..........................] - ETA: 0s - loss: 1.0345
19/63 [========>.....................] - ETA: 0s - loss: 1.0831
29/63 [============>.................] - ETA: 0s - loss: 1.0410
39/63 [=================>............] - ETA: 0s - loss: 1.0398
49/63 [======================>.......] - ETA: 0s - loss: 1.0370
59/63 [===========================>..] - ETA: 0s - loss: 1.0292
63/63 [==============================] - 0s 5ms/step - loss: 1.0303

63/63 [==============================] - 1s 9ms/step - loss: 1.0303 - val_loss: 0.8911
Epoch 20/20

 1/63 [..............................] - ETA: 0s - loss: 0.8411
 8/63 [==>...........................] - ETA: 0s - loss: 0.9346
18/63 [=======>......................] - ETA: 0s - loss: 0.9226
28/63 [============>.................] - ETA: 0s - loss: 0.9545
38/63 [=================>............] - ETA: 0s - loss: 1.0284
48/63 [=====================>........] - ETA: 0s - loss: 1.0479
58/63 [==========================>...] - ETA: 0s - loss: 1.0510
63/63 [==============================] - 0s 6ms/step - loss: 1.0448

63/63 [==============================] - 1s 8ms/step - loss: 1.0448 - val_loss: 1.6152
predictBestModel<-model %>% predict(as.matrix(carbonTestingFinal))
2024-05-06 19:50:31.488130: I tensorflow/core/grappler/optimizers/custom_graph_optimizer_registry.cc:114] Plugin optimizer for device_type GPU is enabled.

 1/63 [..............................] - ETA: 4s
11/63 [====>.........................] - ETA: 0s
32/63 [==============>...............] - ETA: 0s
58/63 [==========================>...] - ETA: 0s
63/63 [==============================] - 0s 3ms/step

63/63 [==============================] - 0s 3ms/step
rmse=function(x,y){
  return((mean(x-y)^2)^0.5)
}

rmse(predictBestModel,carbonTestingLabels)
[1] 0.2827712
MAE(predictBestModel,carbonTestingLabels)
[1] 0.3099322
rsquaredBest<-sum((predictBestModel-carbonTestingLabels)^2)/sum((carbonTestingLabels-mean(carbonTestingLabels))^2)
rsquaredBest
[1] 0.7408867
---
title: "Project"
output: html_notebook
---
# Loading the data
```{r}
carbonData<-read.csv('/Users/angadsingh/Downloads/Carbon Emission.csv')
summary(carbonData)
```

```{r}
str(carbonData)
```
From the str of carbon data i can see that i am having empty vehicle types as "" so i will replace them with No vehicle
```{r}

carbonData$Vehicle.Type[carbonData$Transport=='public'|carbonData$Transport=='walk/bicycle']<-'FuelEfficient'
#carbonData<- carbonData %>% mutate(Vehicle.Type=ifelse(Vehicle.Type=="","No vehicle",Vehicle.Type))
str(carbonData)
```
```{r}
#carbonData[carbonData == ""]<-NA
colSums(is.na(carbonData))
```


```{r}
library(dplyr)
carbonData<-carbonData %>%
  mutate_if(is.character, as.factor)%>%
  mutate_if(is.integer, as.numeric)

str(carbonData)
summary(carbonData)
```





```{r}
table(carbonData$Body.Type)
table(carbonData$Sex)
table(carbonData$Diet)
table(carbonData$How.Often.Shower)
table(carbonData$Heating.Energy.Source)
table(carbonData$Transport)
table(carbonData$Social.Activity)
table(carbonData$Frequency.of.Traveling.by.Air)
table(carbonData$Waste.Bag.Size)
table(carbonData$Energy.efficiency)
```


```{r}
hist(carbonData$CarbonEmission)
carbonData$CarbonEmission<-log(carbonData$CarbonEmission) 
hist(carbonData$CarbonEmission)
```
```{r}
carbonIndices<-which(names(carbonData)=='CarbonEmission')
for (c in colnames(carbonData[,-carbonIndices])) {
  if(is.factor(carbonData[,c])){
    try({
        anovaaResult<-aov(carbonData$CarbonEmission~carbonData[,c])
        cat("ANOVA of ",c, "and CarbonEmission", "\n")
        print(summary(anovaaResult))
        boxplot(carbonData$CarbonEmission~carbonData[,c],shade=TRUE, main = paste("Carbon Emission vs", c), xlab ="CarbonEmission", ylab=c ,col="lightgreen")
        
      })
  }
  else if (is.numeric(carbonData[,c])){
    try({
      corTest<-cor.test(carbonData$CarbonEmission,carbonData[,c], method = "pearson")
      cat("p.value of ",c, "and Carbon Emission", corTest$p.value, "\n")
      plot(carbonData$CarbonEmission,carbonData[,c], main = paste("Carbon Emission vs", c), xlab ="Carbon Emission", ylab=c)
    })
  }
  
}
```

```{r}
library(tidyverse)
parseList<-function(x,emptyLable){
  parsedItem<-str_remove_all(x,"\\[|\\]|'")%>%
    strsplit(", ")%>%
    unlist()
  
  if(length(parsedItem)==0||all(parsedItem=="")){
    parsedItem<-emptyLable
  }
  return(parsedItem)
}
carbonData$Recycling<-sapply(carbonData$Recycling,parseList,emptyLable="No_Recycling")
carbonData$Cooking_With<-sapply(carbonData$Cooking_With,parseList,emptyLable="No_Cooking")

carbonData$Recycling<-sapply(carbonData$Recycling,paste,collapse=",")
carbonData$Cooking_With<-sapply(carbonData$Cooking_With,paste,collapse=",")

#str(carbonData)

dummies<-function(col){

  items<-unlist(str_split(col,","))
  items<-trimws(items)
  items<-items[items != ""]
  
  uniqueItems<-unique(c(items))
  dummyDataFrame<-data.frame(matrix(0,nrow = length(col),ncol = length(uniqueItems)))
  colnames(dummyDataFrame)<-uniqueItems
  
  for (i in seq_along(col)) {
    rowItems<-unlist(str_split(col[i],","))%>%
    map_chr(~str_trim(.))%>%
    discard(~.=="")
    
    rowItems<-rowItems[rowItems %in% uniqueItems]
    dummyDataFrame[i,rowItems]<-1
  }
  return(dummyDataFrame)
}

recyclingDummies<-dummies(carbonData$Recycling)
cookingDummies<-dummies(carbonData$Cooking_With)


carbonData<-cbind(carbonData,recyclingDummies,cookingDummies)

carbonData$Recycling<- NULL
carbonData$Cooking_With<-NULL

str(carbonData)
```


```{r}
library(caret)

carbonDataIndexs <- createDataPartition(carbonData$CarbonEmission, p=0.8, list=FALSE)

carbonTrainData<-carbonData[carbonDataIndexs,]
carbonTrainData

carbonTestData<-carbonData[-carbonDataIndexs,]
carbonTestData

carbonTestLabels<-carbonTestData$CarbonEmission
```
#Benchmark
```{r}
meanTransport<- carbonTrainData %>% #this will calculate emission per transport type 
  group_by(Transport) %>%
  summarize(meanEmission= mean(CarbonEmission, na.rm = TRUE))

meanTransportTable<-setNames(meanTransport$meanEmission,meanTransport$Transport)  # will store transport levels to its average emission

predictTransport<-function(row){
  transportType<-as.character(row["Transport"])
  if(transportType %in% names(meanTransportTable)){
    return(meanTransportTable[[transportType]])
  }
}

benchmarkPred<-apply(carbonTestData,1,predictTransport) # will apply predictTransport function on the test data


rmse(benchmarkPred,carbonTestData$CarbonEmission)
MAE(benchmarkPred,carbonTestData$CarbonEmission)
rsquaredNew<-sum((benchmarkPred-carbonTestData$CarbonEmission)^2)/sum((carbonTestData$CarbonEmission-mean(carbonTestData$CarbonEmission))^2)
rsquaredNew
```
#KNN Model
```{r}
knnModel<-train(CarbonEmission~.,data = carbonTrainData, method="knn", trControl=trainControl(method = "cv", number=5))
```
```{r}
knnModel
```
```{r}
knnPred<-predict(knnModel,newdata = carbonTestData)

rmse=function(x,y){
  return((mean(x-y)^2)^0.5)
}
rmse(knnPred,carbonTestLabels)
```
```{r}
lmModel<-train(CarbonEmission~.,data = carbonTrainData, method="lm", trControl=trainControl(method = "cv", number=5))
lmModel
```
```{r}
summary(lmModel)
```
```{r}
stepwiseModel<-train(CarbonEmission~.,data = carbonTrainData, method="leapBackward", trControl=trainControl(method = "cv", number=5))
stepwiseModel
```
```{r}
summary(stepwiseModel$finalModel)
```

#Lasso Model
```{r}
library(glmnet)
set.seed(1)
lassoModel<-train(CarbonEmission~.,data = carbonTrainData,method="glmnet",trControl= trainControl(method = "cv", number=5), tuneGrid = expand.grid(alpha=1, lambda=10^seq(-3,3,length=100))) 

lassoModel


lassoLambda<-lassoModel$bestTune$lambda
lassoPredictor<- setdiff(names(carbonTrainData),"CarbonEmission")
lassoFinalModel<-glmnet(as.matrix(carbonTrainData[,lassoPredictor]),carbonTrainData[,"CarbonEmission"],alpha = 1,lambda = lassoLambda, family = "gaussian")

coeff<-coef(lassoFinalModel)
coeff

zeroCoeff<-coeff==0
zeroCoeff
```
```{r}
plot(lassoModel)
```


#Ridge Model
```{r}
set.seed(1)
ridgeModel<-train(CarbonEmission~.,data = carbonTrainData,method="glmnet",trControl= trainControl(method = "cv", number=5), tuneGrid = expand.grid(alpha=0, lambda=10^seq(-3,3,length=100))) 

ridgeModel

ridgeLambda<-ridgeModel$bestTune$lambda
ridgePredictor<- setdiff(names(carbonTrainData),"CarbonEmission")
ridgeFinalModel<-glmnet(as.matrix(carbonTrainData[,ridgePredictor]),carbonTrainData[,"CarbonEmission"],alpha = 1,lambda = ridgeLambda, family = "gaussian")

ridgeFinalModel
```
```{r}
plot(ridgeModel)
```

#Elastic Net Model
```{r}
set.seed(1)
enetModel<-train(CarbonEmission~., data = carbonTrainData, method = "glmnet", trControl=trainControl(method="cv",number=5,preProc="nzv"),tuneGrid=expand.grid(alpha=seq(0,1,length=10),lambda=10^seq(-3,1,length=100)))

enetModel

enetModel$bestTune
```
```{r}
enetLambda<-enetModel$bestTune$lambda
enetAlpha<-enetModel$bestTune$alpha

enetPredector<-setdiff(names(carbonTrainData),"CarbonEmission")

enetFinalModel<-glmnet(as.matrix(carbonTrainData[,enetPredector]),carbonTrainData[,"CarbonEmission"], alpha = enetAlpha,lambda = enetLambda, family = "gaussian")

enetFinalModel
```

```{r}
carbonTrainData$`No Recycling`
```


#Random Forest Model
```{r}
library(randomForest)
set.seed(1)
randomForestModel<-randomForest(CarbonEmission~.,data = carbonTrainData)
randomForestModel
```

```{r}
mRf<-train(CarbonEmission~.,
           data=carbonTrainData,
           method="rf",
           trControl=trainControl(method = "cv", number =5)
           )
```

```{r}
mRf
varImp(mRf)
```
```{r}
rfPred<-predict(mRf,newdata = carbonTestData)

MAE(carbonTestData$CarbonEmission,rfPred)
rmse(carbonTestData$CarbonEmission,rfPred)
cor(carbonTestData$CarbonEmission,rfPred)^2
```

```{r}
plot(carbonTestData$CarbonEmission,rfPred)
```
# GBM
```{r}
set.seed(1)

grBoostedTree<-train(
  CarbonEmission~.,
  data = carbonTrainData,
  method="gbm",
  trControl=trainControl(method = "cv",number = 5)
)
```
```{r}
grBoostedTree

gbmPred<-predict(grBoostedTree, carbonTestData)
```
```{r}
MAE(carbonTestData$CarbonEmission,gbmPred)
rmse(carbonTestData$CarbonEmission,gbmPred)
cor(carbonTestData$CarbonEmission,gbmPred)^2
```
```{r}
plot(carbonTestData$CarbonEmission,gbmPred)
```
#SV Linear Model
```{r}
set.seed(1)

svmLinear<-train(
  CarbonEmission~.,
  data = carbonTrainData,
  method="svmLinear",
  trControl=trainControl(method = "cv",number = 5, preProc=c("center","scale"))
)
```
```{r}
svmLinear

svmPred<-predict(svmLinear,carbonTestData)

plot(svmPred,carbonTestData$CarbonEmission)
```
#SVM Radial Model
```{r}
set.seed(1)

svmRadial<-train(
  CarbonEmission~.,
  data = carbonTrainData,
  method="svmRadial",
  trControl=trainControl(method = "cv",number = 5, preProc=c("center","scale"))
)
```

```{r}
svmRadial

svmRadialPred<-predict(svmRadial,carbonTestData)

plot(svmRadialPred,carbonTestData$CarbonEmission)
```
#Comparing models 
```{r}
compare=resamples(list(KNN=knnModel,LIN=lmModel,stepWise=stepwiseModel,Lasso=lassoModel,Ridge=ridgeModel,Enet=enetModel,RF=mRf,GBM=grBoostedTree,SVML=svmLinear,SVMR=svmRadial))
summary(compare) # Out of all the models SVM Radial stands out the most
```

# Neural Network Preprocessing
```{r}
library(caret)
carbonInd<-createDataPartition(carbonTrainData$CarbonEmission,p=0.9,list = FALSE)
carbonIndex<-which(names(carbonTrainData)=='CarbonEmission')

carbonTrainingData<-carbonTrainData[carbonInd,-carbonIndex]
str(carbonTrainingData)

carbonTrainingLabels<-carbonTrainData[carbonInd,carbonIndex]
str(carbonTrainingLabels)

carbonValidationData<-carbonTrainData[-carbonInd,-carbonIndex]
carbonValidationData

carbonValidationLabels<-carbonTrainData[-carbonInd,carbonIndex]
str(carbonValidationLabels)

carbonTestingData<-carbonTestData[,-carbonIndex]
carbonTestingData

carbonTestingLabels<-carbonTestData[,carbonIndex]
str(carbonTestingLabels)
```
```{r}
dim(carbonTrainingData)
dim(carbonTestingData)
```


#Scaling numeric Variables and one hot encoding categorical variables
```{r}
library(mltools)
library(data.table)
numericCols<-c("Monthly.Grocery.Bill","Vehicle.Monthly.Distance.Km","Waste.Bag.Weekly.Count",
               "How.Long.TV.PC.Daily.Hour","How.Many.New.Clothes.Monthly","How.Long.Internet.Daily.Hour","Metal","Paper","Plastic","Glass","Stove","Oven"
               ,"Microwave","Grill","Airfryer","No_Cooking","No_Recycling")

categoricalCols<-c("Body.Type","Sex","Diet","How.Often.Shower","Heating.Energy.Source","Transport","Vehicle.Type","Social.Activity",
                   "Frequency.of.Traveling.by.Air","Waste.Bag.Size","Energy.efficiency")

carbonTrainingDataNew<-scale(carbonTrainingData[,numericCols])
colMeanTrain<-attr(carbonTrainingDataNew,"scaled:center")
colStddevsTrain<-attr(carbonTrainingDataNew,"scaled:scale")


carbonTrainingData[,numericCols]<-carbonTrainingDataNew
carbonValidationData[,numericCols]<-scale(carbonValidationData[,numericCols],center = colMeanTrain,scale = colStddevsTrain)
carbonTestingData[,numericCols]<-scale(carbonTestingData[,numericCols],center = colMeanTrain,scale = colStddevsTrain)

carbonTrainingTable<-as.data.table(carbonTrainingData)
carbonValidationTable<-as.data.table(carbonValidationData)
carbonTestingTable<-as.data.table(carbonTestingData)

carbonTrainingOneHot<-one_hot(carbonTrainingTable,naCols=FALSE,dropCols=TRUE,dropUnusedLevels=TRUE)
carbonTrainingOneHot

carbonValidationOneHot<-one_hot(carbonValidationTable,naCols=FALSE,dropCols=TRUE,dropUnusedLevels=TRUE)
carbonValidationOneHot

carbonTestingOneHot<-one_hot(carbonTestingTable,naCols=FALSE,dropCols=TRUE,dropUnusedLevels=TRUE)
carbonTestingOneHot

carbonTrainingFinal<-as.data.frame(cbind(carbonTrainingTable[, ..numericCols], carbonTrainingOneHot))
carbonTrainingFinal

carbonValidationFinal<-as.data.frame(cbind(carbonValidationTable[, ..numericCols], carbonValidationOneHot))
carbonValidationFinal

carbonTestingFinal<-as.data.frame(cbind(carbonTestingTable[, ..numericCols], carbonTestingOneHot))
carbonTestingFinal
```
```{r}
library(keras)

model<-keras_model_sequential()%>%
  layer_dense(units = 32,activation = "relu",input_shape = dim(carbonTrainingFinal)[2])%>%
  layer_dropout(rate=0.3)%>%
  layer_dense(units = 32,activation = "relu")%>%
  layer_dropout(rate=0.3)%>%
  layer_dense(units = 16,activation = "relu")%>%
  layer_dropout(rate=0.3)%>%
  layer_dense(units = 1)

model %>% compile(
  loss="mse",
  optimizer=optimizer_adam(lr=0.001)
)

history<-model %>% fit(as.matrix(carbonTrainingFinal),
                       carbonTrainingLabels,
                       batch_size=50,
                       epochs=20,
                       validation_data=list(as.matrix(carbonValidationFinal),carbonValidationLabels)
                         )
```
```{r}
kerasPrediction<-model %>% predict(as.matrix(carbonTestingFinal))

rmse=function(x,y){
  return((mean(x-y)^2)^0.5)
}

rmse(kerasPrediction,carbonTestLabels)
MAE(kerasPrediction,carbonTestLabels)
rsquared<-sum((kerasPrediction-carbonTestLabels)^2)/sum((carbonTestLabels-mean(carbonTestLabels))^2)
rsquared
```
```{r}
library(tfruns)
runs<-tuning_run(
  "carbonEmission.R",
  flags=list(
    learning_rate=c(0.1,0.5,0.01,0.001),
    nodes1=c(8,16,32,64,128),
    nodes2=c(8,16,32,64,128),
    nodes3=c(8,16,32,64,128),
    batch_size=c(16,32,64,128),
    dropout=c(0.1,0.2,0.3,0.4,0.5),
    activation=c("relu")
  ),sample=0.001
)
```

# Runs
```{r}
runs=runs[order(runs$metric_val_loss),]
runs
view_run(runs$run_dir[1])
```
```{r}
dim(carbonTrainingFinal)
dim(carbonValidationFinal)
carbonTrainingFinal<-rbind(carbonTrainingFinal,carbonValidationFinal)
carbonTrainingLabels<-c(carbonTrainingLabels,carbonValidationLabels)
dim(carbonTrainingFinal)
```


```{r}
BestModel<-keras_model_sequential()%>%
  layer_dense(units = 128,activation = "relu",input_shape = dim(carbonTrainingFinal)[2])%>%
  layer_dropout(rate=0.3)%>%
  layer_dense(units = 8,activation = "relu")%>%
  layer_dropout(rate=0.3)%>%
  layer_dense(units = 128,activation = "relu")%>%
  layer_dropout(rate=0.3)%>%
  layer_dense(units = 1)

BestModel %>% compile(
  loss="mse",
  optimizer=optimizer_adam(lr=0.001)
)

history<-BestModel %>% fit(as.matrix(carbonTrainingFinal),
                       carbonTrainingLabels,
                       batch_size=128,
                       epochs=20,
                       validation_data=list(as.matrix(carbonTestingFinal),carbonTestingLabels)
                         )
```
```{r}
predictBestModel<-model %>% predict(as.matrix(carbonTestingFinal))
```
```{r}
rmse=function(x,y){
  return((mean(x-y)^2)^0.5)
}

rmse(predictBestModel,carbonTestingLabels)
MAE(predictBestModel,carbonTestingLabels)
rsquaredBest<-sum((predictBestModel-carbonTestingLabels)^2)/sum((carbonTestingLabels-mean(carbonTestingLabels))^2)
rsquaredBest
```

